diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 550aee7847..de52fbb00e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -79,8 +79,6 @@ import com.google.common.base.Charsets; import org.apache.commons.collections.map.UnmodifiableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -98,6 +96,8 @@ import org.apache.hadoop.util.StringUtils; import org.codehaus.stax2.XMLInputFactory2; import org.codehaus.stax2.XMLStreamReader2; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; @@ -192,11 +192,12 @@ @InterfaceStability.Stable public class Configuration implements Iterable>, Writable { - private static final Log LOG = - LogFactory.getLog(Configuration.class); + private static final Logger LOG = + LoggerFactory.getLogger(Configuration.class); - private static final Log LOG_DEPRECATION = - LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation"); + private static final Logger LOG_DEPRECATION = + LoggerFactory.getLogger( + "org.apache.hadoop.conf.Configuration.deprecation"); private boolean quietmode = true; @@ -2885,10 +2886,10 @@ private Resource loadResource(Properties properties, } return null; } catch (IOException e) { - LOG.fatal("error parsing conf " + name, e); + LOG.error("error parsing conf " + name, e); throw new RuntimeException(e); } catch (XMLStreamException e) { - LOG.fatal("error parsing conf " + name, e); + LOG.error("error parsing conf " + name, e); throw 
new RuntimeException(e); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java index bdd006dfa7..146c6d844f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java @@ -22,9 +22,10 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import org.apache.commons.logging.*; import org.apache.hadoop.util.Time; import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Collection; @@ -41,8 +42,8 @@ public abstract class ReconfigurableBase extends Configured implements Reconfigurable { - private static final Log LOG = - LogFactory.getLog(ReconfigurableBase.class); + private static final Logger LOG = + LoggerFactory.getLogger(ReconfigurableBase.class); // Use for testing purpose. 
private ReconfigurationUtil reconfigurationUtil = new ReconfigurationUtil(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java index bb221ee361..5a616f72b9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java @@ -18,8 +18,6 @@ package org.apache.hadoop.conf; -import org.apache.commons.logging.*; - import org.apache.commons.lang.StringEscapeUtils; import java.util.Collection; @@ -33,6 +31,8 @@ import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A servlet for changing a node's configuration. @@ -45,8 +45,8 @@ public class ReconfigurationServlet extends HttpServlet { private static final long serialVersionUID = 1L; - private static final Log LOG = - LogFactory.getLog(ReconfigurationServlet.class); + private static final Logger LOG = + LoggerFactory.getLogger(ReconfigurationServlet.class); // the prefix used to fing the attribute holding the reconfigurable // for a given request diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java index 61ee743c42..de0e5dd626 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java @@ -26,12 +26,12 @@ import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY; @@ -42,8 +42,8 @@ */ @InterfaceAudience.Private public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec { - private static final Log LOG = - LogFactory.getLog(JceAesCtrCryptoCodec.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(JceAesCtrCryptoCodec.class.getName()); private Configuration conf; private String provider; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java index d08e58882c..8d01f42095 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java @@ -26,22 +26,22 @@ import java.security.SecureRandom; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import com.google.common.base.Preconditions; import org.apache.hadoop.crypto.random.OsSecureRandom; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implement the AES-CTR crypto codec using JNI into OpenSSL. 
*/ @InterfaceAudience.Private public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec { - private static final Log LOG = - LogFactory.getLog(OpensslAesCtrCryptoCodec.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslAesCtrCryptoCodec.class.getName()); private Configuration conf; private Random random; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java index 6a03bb608e..133a9f9110 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java @@ -26,13 +26,13 @@ import javax.crypto.NoSuchPaddingException; import javax.crypto.ShortBufferException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.base.Preconditions; import org.apache.hadoop.util.PerformanceAdvisory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OpenSSL cipher using JNI. 
@@ -41,8 +41,8 @@ */ @InterfaceAudience.Private public final class OpensslCipher { - private static final Log LOG = - LogFactory.getLog(OpensslCipher.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslCipher.class.getName()); public static final int ENCRYPT_MODE = 1; public static final int DECRYPT_MODE = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java index 6c53a0a217..1219bf9cc2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java @@ -19,13 +19,13 @@ import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.base.Preconditions; import org.apache.hadoop.util.PerformanceAdvisory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OpenSSL secure random using JNI. 
@@ -44,8 +44,8 @@ @InterfaceAudience.Private public class OpensslSecureRandom extends Random { private static final long serialVersionUID = -7828193502768789584L; - private static final Log LOG = - LogFactory.getLog(OpensslSecureRandom.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslSecureRandom.class.getName()); /** If native SecureRandom unavailable, use java SecureRandom */ private java.security.SecureRandom fallback = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java index 9428b98175..66715916f5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java @@ -23,12 +23,12 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT; @@ -39,7 +39,8 @@ */ @InterfaceAudience.Private public class OsSecureRandom extends Random implements Closeable, Configurable { - public static final Log LOG = LogFactory.getLog(OsSecureRandom.class); + public static final Logger LOG = + LoggerFactory.getLogger(OsSecureRandom.class); private static final long serialVersionUID = 6391500337172057900L; @@ -112,7 +113,7 @@ synchronized protected int next(int nbits) 
{ @Override synchronized public void close() { if (stream != null) { - IOUtils.cleanup(LOG, stream); + IOUtils.cleanupWithLogger(LOG, stream); stream = null; } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java index ef68437274..9bea8f9137 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java @@ -32,8 +32,6 @@ import java.util.StringTokenizer; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -52,6 +50,8 @@ import org.apache.hadoop.util.Progressable; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides an interface for implementors of a Hadoop file system @@ -66,7 +66,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public abstract class AbstractFileSystem { - static final Log LOG = LogFactory.getLog(AbstractFileSystem.class); + static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class); /** Recording statistics per a file system class. 
*/ private static final Map diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java index 0a8cc73713..75622ad374 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java @@ -27,14 +27,14 @@ import java.util.Arrays; import java.util.EnumSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract Checksumed Fs. @@ -110,8 +110,8 @@ private int getSumBufferSize(int bytesPerSum, int bufferSize, Path file) * It verifies that data matches checksums. 
*******************************************************/ private static class ChecksumFSInputChecker extends FSInputChecker { - public static final Log LOG - = LogFactory.getLog(FSInputChecker.class); + public static final Logger LOG = + LoggerFactory.getLogger(FSInputChecker.class); private static final int HEADER_LENGTH = 8; private ChecksumFs fs; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java index 3542a9b585..09c3a8ad3d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java @@ -26,12 +26,12 @@ import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A daemon thread that waits for the next file system to renew. @@ -39,8 +39,8 @@ @InterfaceAudience.Private public class DelegationTokenRenewer extends Thread { - private static final Log LOG = LogFactory - .getLog(DelegationTokenRenewer.class); + private static final Logger LOG = LoggerFactory + .getLogger(DelegationTokenRenewer.class); /** The renewable interface used by the renewer. 
*/ public interface Renewable { @@ -243,7 +243,7 @@ public void removeRenewAction( LOG.error("Interrupted while canceling token for " + fs.getUri() + "filesystem"); if (LOG.isDebugEnabled()) { - LOG.debug(ie.getStackTrace()); + LOG.debug("Exception in removeRenewAction: ", ie); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java index 9b66c950bc..4f06e26fcf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java @@ -22,11 +22,12 @@ import java.io.InputStream; import java.util.zip.Checksum; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.nio.ByteBuffer; import java.nio.IntBuffer; @@ -37,8 +38,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS"}) @InterfaceStability.Unstable abstract public class FSInputChecker extends FSInputStream { - public static final Log LOG - = LogFactory.getLog(FSInputChecker.class); + public static final Logger LOG = + LoggerFactory.getLogger(FSInputChecker.class); /** The file name from which data is read from */ protected Path file; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index 160a63dee1..fef968bde0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -35,8 +35,6 @@ import java.util.TreeSet; 
import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -63,6 +61,8 @@ import com.google.common.base.Preconditions; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The FileContext class provides an interface for users of the Hadoop @@ -169,7 +169,7 @@ @InterfaceStability.Stable public class FileContext { - public static final Log LOG = LogFactory.getLog(FileContext.class); + public static final Logger LOG = LoggerFactory.getLogger(FileContext.class); /** * Default permission for directory and symlink * In previous versions, this default permission was also used to @@ -332,7 +332,7 @@ public AbstractFileSystem run() throws UnsupportedFileSystemException { } }); } catch (InterruptedException ex) { - LOG.error(ex); + LOG.error(ex.toString()); throw new IOException("Failed to get the AbstractFileSystem for path: " + uri, ex); } @@ -446,7 +446,7 @@ public static FileContext getFileContext(final URI defaultFsUri, } catch (UnsupportedFileSystemException ex) { throw ex; } catch (IOException ex) { - LOG.error(ex); + LOG.error(ex.toString()); throw new RuntimeException(ex); } return getFileContext(defaultAfs, aConf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index b656a878e6..eb8a5c3c1c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -45,8 +45,6 @@ import org.apache.commons.collections.map.CaseInsensitiveMap; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import 
org.apache.commons.compress.archivers.tar.TarArchiveInputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -57,6 +55,8 @@ import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of file-processing util methods @@ -65,7 +65,7 @@ @InterfaceStability.Evolving public class FileUtil { - private static final Log LOG = LogFactory.getLog(FileUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class); /* The error code is defined in winutils to indicate insufficient * privilege to create symbolic links. This value need to keep in @@ -697,7 +697,7 @@ private static void unTarUsingJava(File inFile, File untarDir, entry = tis.getNextTarEntry(); } } finally { - IOUtils.cleanup(LOG, tis, inputStream); + IOUtils.cleanupWithLogger(LOG, tis, inputStream); } } @@ -1287,7 +1287,7 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd, bos = new BufferedOutputStream(fos); jos = new JarOutputStream(bos, jarManifest); } finally { - IOUtils.cleanup(LOG, jos, bos, fos); + IOUtils.cleanupWithLogger(LOG, jos, bos, fos); } String[] jarCp = {classPathJar.getCanonicalPath(), unexpandedWildcardClasspath.toString()}; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java index 59d15c2716..721f4df7d0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java @@ -24,8 +24,6 @@ import java.util.LinkedList; import 
org.apache.commons.lang.WordUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; @@ -39,12 +37,14 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Provide command line access to a FileSystem. */ @InterfaceAudience.Private public class FsShell extends Configured implements Tool { - static final Log LOG = LogFactory.getLog(FsShell.class); + static final Logger LOG = LoggerFactory.getLogger(FsShell.class); private static final int MAX_LINE_WIDTH = 80; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java index 0a829298ca..76e379c51f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java @@ -22,7 +22,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.permission.ChmodParser; @@ -32,6 +31,7 @@ import org.apache.hadoop.fs.shell.FsCommand; import org.apache.hadoop.fs.shell.PathData; import org.apache.hadoop.util.Shell; +import org.slf4j.Logger; /** * This class is the home for file permissions related commands. 
@@ -41,7 +41,7 @@ @InterfaceStability.Unstable public class FsShellPermissions extends FsCommand { - static Log LOG = FsShell.LOG; + static final Logger LOG = FsShell.LOG; /** * Register the permission related commands with the factory diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java index 7c69167c3a..ca3db1d98e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java @@ -23,18 +23,19 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Unstable class Globber { - public static final Log LOG = LogFactory.getLog(Globber.class.getName()); + public static final Logger LOG = + LoggerFactory.getLogger(Globber.class.getName()); private final FileSystem fs; private final FileContext fc; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java index 6a1e8bd854..4c2fd1be74 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.EOFException; import java.io.FileNotFoundException; @@ -50,7 +50,8 @@ public class HarFileSystem extends FileSystem { - private static final Log LOG = LogFactory.getLog(HarFileSystem.class); + private static final Logger LOG = + LoggerFactory.getLogger(HarFileSystem.class); public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries"; public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10; @@ -1173,7 +1174,7 @@ private void parseMetaData() throws IOException { LOG.warn("Encountered exception ", ioe); throw ioe; } finally { - IOUtils.cleanup(LOG, lin, in); + IOUtils.cleanupWithLogger(LOG, lin, in); } FSDataInputStream aIn = fs.open(archiveIndexPath); @@ -1198,7 +1199,7 @@ private void parseMetaData() throws IOException { } } } finally { - IOUtils.cleanup(LOG, aIn); + IOUtils.cleanupWithLogger(LOG, aIn); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java index 1ed01ea07f..c1e9d21ecc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java @@ -23,14 +23,15 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.*; import org.apache.hadoop.util.*; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An implementation of a round-robin scheme for disk allocation for creating * files. The way it works is that it is kept track what disk was last @@ -245,8 +246,8 @@ int getCurrentDirectoryIndex() { private static class AllocatorPerContext { - private final Log LOG = - LogFactory.getLog(AllocatorPerContext.class); + private static final Logger LOG = + LoggerFactory.getLogger(AllocatorPerContext.class); private Random dirIndexRandomizer = new Random(); private String contextCfgItemName; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java index b7718121ee..49cd600628 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java @@ -19,11 +19,12 @@ import java.io.IOException; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides a trash facility which supports pluggable Trash policies. 
@@ -34,8 +35,8 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class Trash extends Configured { - private static final org.apache.commons.logging.Log LOG = - LogFactory.getLog(Trash.class); + private static final Logger LOG = + LoggerFactory.getLogger(Trash.class); private TrashPolicy trashPolicy; // configured trash policy instance diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java index c65e16ae5f..265e967b01 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java @@ -30,8 +30,6 @@ import java.util.Collection; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -41,6 +39,8 @@ import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Provides a trash feature. Files are moved to a user's trash * directory, a subdirectory of their home directory named ".Trash". 
Files are @@ -54,8 +54,8 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class TrashPolicyDefault extends TrashPolicy { - private static final Log LOG = - LogFactory.getLog(TrashPolicyDefault.class); + private static final Logger LOG = + LoggerFactory.getLogger(TrashPolicyDefault.class); private static final Path CURRENT = new Path("Current"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 5f4c85526c..4c1236baa4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -25,8 +25,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.ftp.FTP; import org.apache.commons.net.ftp.FTPClient; import org.apache.commons.net.ftp.FTPFile; @@ -45,6 +43,8 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -56,8 +56,8 @@ @InterfaceStability.Stable public class FTPFileSystem extends FileSystem { - public static final Log LOG = LogFactory - .getLog(FTPFileSystem.class); + public static final Logger LOG = LoggerFactory + .getLogger(FTPFileSystem.class); public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java index ddb272498b..73ab5f6019 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java @@ -24,8 +24,6 @@ import java.io.ObjectInputValidation; import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -33,6 +31,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class for file/directory permissions. 
@@ -41,7 +41,7 @@ @InterfaceStability.Stable public class FsPermission implements Writable, Serializable, ObjectInputValidation { - private static final Log LOG = LogFactory.getLog(FsPermission.class); + private static final Logger LOG = LoggerFactory.getLogger(FsPermission.class); private static final long serialVersionUID = 0x2fe08564; static final WritableFactory FACTORY = new WritableFactory() { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java index c7fae7bd5f..de86bab6d3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java @@ -23,19 +23,20 @@ import java.util.Iterator; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.StringUtils; import com.jcraft.jsch.ChannelSftp; import com.jcraft.jsch.JSch; import com.jcraft.jsch.JSchException; import com.jcraft.jsch.Session; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Concurrent/Multiple Connections. */ class SFTPConnectionPool { - public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(SFTPFileSystem.class); // Maximum number of allowed live connections. This doesn't mean we cannot // have more live connections. 
It means that when we have more // live connections than this threshold, any unused connection will be diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java index 6de69fa5e2..421769d632 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Vector; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -41,11 +39,14 @@ import com.jcraft.jsch.ChannelSftp.LsEntry; import com.jcraft.jsch.SftpATTRS; import com.jcraft.jsch.SftpException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** SFTP FileSystem. 
*/ public class SFTPFileSystem extends FileSystem { - public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(SFTPFileSystem.class); private SFTPConnectionPool connectionPool; private URI uri; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java index 4c5cbadbc4..c292cf6a9d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java @@ -27,8 +27,6 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathNotFoundException; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An abstract class for the execution of a file system command @@ -59,7 +59,7 @@ abstract public class Command extends Configured { private int depth = 0; protected ArrayList exceptions = new ArrayList(); - private static final Log LOG = LogFactory.getLog(Command.class); + private static final Logger LOG = LoggerFactory.getLogger(Command.class); /** allows stdout to be captured if necessary */ public PrintStream out = System.out; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index cf95a49eb8..93fd2cf77e 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -26,8 +26,6 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -47,6 +45,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -141,7 +141,8 @@ public interface ActiveStandbyElectorCallback { @VisibleForTesting protected static final String BREADCRUMB_FILENAME = "ActiveBreadCrumb"; - public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class); + public static final Logger LOG = + LoggerFactory.getLogger(ActiveStandbyElector.class); private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000; @@ -712,7 +713,7 @@ protected ZooKeeper createZooKeeper() throws IOException { } private void fatalError(String errorMessage) { - LOG.fatal(errorMessage); + LOG.error(errorMessage); reset(); appClient.notifyFatalError(errorMessage); } @@ -824,10 +825,10 @@ private boolean reEstablishSession() { createConnection(); success = true; } catch(IOException e) { - LOG.warn(e); + LOG.warn(e.toString()); sleepFor(5000); } catch(KeeperException e) { - LOG.warn(e); + LOG.warn(e.toString()); sleepFor(5000); } ++connectionRetryCount; @@ -866,7 +867,7 @@ public synchronized void terminateConnection() { try { tempZk.close(); } catch(InterruptedException e) { - LOG.warn(e); + LOG.warn(e.toString()); } zkConnectionState = ConnectionState.TERMINATED; wantToBeInElection = false; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java index d952e29381..3c05a25957 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java @@ -19,9 +19,6 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -32,6 +29,8 @@ import org.apache.hadoop.ipc.RPC; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The FailOverController is responsible for electing an active service @@ -43,7 +42,8 @@ @InterfaceStability.Evolving public class FailoverController { - private static final Log LOG = LogFactory.getLog(FailoverController.class); + private static final Logger LOG = + LoggerFactory.getLogger(FailoverController.class); private final int gracefulFenceTimeout; private final int rpcTimeoutToNewActive; @@ -252,7 +252,7 @@ public void failover(HAServiceTarget fromSvc, } catch (FailoverFailedException ffe) { msg += ". 
Failback to " + fromSvc + " failed (" + ffe.getMessage() + ")"; - LOG.fatal(msg); + LOG.error(msg); } } throw new FailoverFailedException(msg, cause); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java index 5eff14c108..9b7d7ba5d1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java @@ -28,8 +28,6 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -43,6 +41,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A command-line tool for making calls in the HAServiceProtocol. @@ -62,7 +62,7 @@ public abstract class HAAdmin extends Configured implements Tool { * operation, which is why it is not documented in the usage below. 
*/ private static final String FORCEMANUAL = "forcemanual"; - private static final Log LOG = LogFactory.getLog(HAAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(HAAdmin.class); private int rpcTimeoutForChecks = -1; @@ -449,7 +449,7 @@ protected int runCmd(String[] argv) throws Exception { if (cmdLine.hasOption(FORCEMANUAL)) { if (!confirmForceManual()) { - LOG.fatal("Aborted"); + LOG.error("Aborted"); return -1; } // Instruct the NNs to honor this request even if they're diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java index 24c149c458..a93df75649 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java @@ -23,8 +23,6 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import static org.apache.hadoop.fs.CommonConfigurationKeys.*; import org.apache.hadoop.ha.HAServiceProtocol; @@ -35,6 +33,8 @@ import org.apache.hadoop.util.Daemon; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a daemon which runs in a loop, periodically heartbeating @@ -47,7 +47,7 @@ */ @InterfaceAudience.Private public class HealthMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( HealthMonitor.class); private Daemon daemon; @@ -283,7 +283,7 @@ private MonitorDaemon() { setUncaughtExceptionHandler(new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { - LOG.fatal("Health monitor failed", e); + LOG.error("Health monitor failed", e); 
enterState(HealthMonitor.State.HEALTH_MONITOR_FAILED); } }); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java index 1afd93785d..2247a34610 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java @@ -22,8 +22,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -31,6 +29,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class parses the configured list of fencing methods, and @@ -61,7 +61,7 @@ public class NodeFencer { private static final Pattern HASH_COMMENT_RE = Pattern.compile("#.*$"); - private static final Log LOG = LogFactory.getLog(NodeFencer.class); + private static final Logger LOG = LoggerFactory.getLogger(NodeFencer.class); /** * Standard fencing methods included with Hadoop. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java index 64cd5a894c..9ae113b0ea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java @@ -23,8 +23,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import com.google.common.annotations.VisibleForTesting; @@ -272,7 +270,7 @@ private int parseConfiggedPort(String portStr) * Adapter from JSch's logger interface to our log4j */ private static class LogAdapter implements com.jcraft.jsch.Logger { - static final Log LOG = LogFactory.getLog( + static final Logger LOG = LoggerFactory.getLogger( SshFenceByTcpPort.class.getName() + ".jsch"); @Override @@ -285,9 +283,8 @@ public boolean isEnabled(int level) { case com.jcraft.jsch.Logger.WARN: return LOG.isWarnEnabled(); case com.jcraft.jsch.Logger.ERROR: - return LOG.isErrorEnabled(); case com.jcraft.jsch.Logger.FATAL: - return LOG.isFatalEnabled(); + return LOG.isErrorEnabled(); default: return false; } @@ -306,10 +303,8 @@ public void log(int level, String message) { LOG.warn(message); break; case com.jcraft.jsch.Logger.ERROR: - LOG.error(message); - break; case com.jcraft.jsch.Logger.FATAL: - LOG.fatal(message); + LOG.error(message); break; default: break; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java index 055bcaa582..20a4681327 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java @@ -28,8 +28,6 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -56,11 +54,13 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.LimitedPrivate("HDFS") public abstract class ZKFailoverController { - static final Log LOG = LogFactory.getLog(ZKFailoverController.class); + static final Logger LOG = LoggerFactory.getLogger(ZKFailoverController.class); public static final String ZK_QUORUM_KEY = "ha.zookeeper.quorum"; private static final String ZK_SESSION_TIMEOUT_KEY = "ha.zookeeper.session-timeout.ms"; @@ -162,7 +162,7 @@ public HAServiceTarget getLocalTarget() { public int run(final String[] args) throws Exception { if (!localTarget.isAutoFailoverEnabled()) { - LOG.fatal("Automatic failover is not enabled for " + localTarget + "." + + LOG.error("Automatic failover is not enabled for " + localTarget + "." + " Please ensure that automatic failover is enabled in the " + "configuration before running the ZK failover controller."); return ERR_CODE_AUTO_FAILOVER_NOT_ENABLED; @@ -184,7 +184,7 @@ public Integer run() { } }); } catch (RuntimeException rte) { - LOG.fatal("The failover controller encounters runtime error: " + rte); + LOG.error("The failover controller encounters runtime error: " + rte); throw (Exception)rte.getCause(); } } @@ -195,7 +195,7 @@ private int doRun(String[] args) try { initZK(); } catch (KeeperException ke) { - LOG.fatal("Unable to start failover controller. 
Unable to connect " + LOG.error("Unable to start failover controller. Unable to connect " + "to ZooKeeper quorum at " + zkQuorum + ". Please check the " + "configured value for " + ZK_QUORUM_KEY + " and ensure that " + "ZooKeeper is running."); @@ -221,7 +221,7 @@ private int doRun(String[] args) } if (!elector.parentZNodeExists()) { - LOG.fatal("Unable to start failover controller. " + LOG.error("Unable to start failover controller. " + "Parent znode does not exist.\n" + "Run with -formatZK flag to initialize ZooKeeper."); return ERR_CODE_NO_PARENT_ZNODE; @@ -230,7 +230,7 @@ private int doRun(String[] args) try { localTarget.checkFencingConfigured(); } catch (BadFencingConfigurationException e) { - LOG.fatal("Fencing is not configured for " + localTarget + ".\n" + + LOG.error("Fencing is not configured for " + localTarget + ".\n" + "You must configure a fencing method before using automatic " + "failover.", e); return ERR_CODE_NO_FENCER; @@ -376,7 +376,7 @@ private synchronized void mainLoop() throws InterruptedException { } private synchronized void fatalError(String err) { - LOG.fatal("Fatal error occurred:" + err); + LOG.error("Fatal error occurred:" + err); fatalError = err; notifyAll(); } @@ -395,7 +395,7 @@ private synchronized void becomeActive() throws ServiceFailedException { } catch (Throwable t) { String msg = "Couldn't make " + localTarget + " active"; - LOG.fatal(msg, t); + LOG.error(msg, t); recordActiveAttempt(new ActiveAttemptRecord(false, msg + "\n" + StringUtils.stringifyException(t))); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java index 63bfbcafdf..7f755825e9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java @@ -19,8 +19,6 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.ha.HAServiceProtocol; @@ -42,6 +40,8 @@ import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is used on the server side. Calls come across the wire for the @@ -61,7 +61,7 @@ public class HAServiceProtocolServerSideTranslatorPB implements TransitionToActiveResponseProto.newBuilder().build(); private static final TransitionToStandbyResponseProto TRANSITION_TO_STANDBY_RESP = TransitionToStandbyResponseProto.newBuilder().build(); - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( HAServiceProtocolServerSideTranslatorPB.class); public HAServiceProtocolServerSideTranslatorPB(HAServiceProtocol server) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index d7436b2282..28b9bb0f03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -53,8 +53,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.sun.jersey.spi.container.servlet.ServletContainer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; @@ -103,6 +101,8 @@ import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.webapp.WebAppContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Create a Jetty embedded server to answer http requests. The primary goal is @@ -117,7 +117,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public final class HttpServer2 implements FilterContainer { - public static final Log LOG = LogFactory.getLog(HttpServer2.class); + public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); public static final String HTTP_SCHEME = "http"; public static final String HTTPS_SCHEME = "https"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java index 9ca5b927df..fc64697bb8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java @@ -29,11 +29,11 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.Filter; @@ -47,7 +47,8 @@ public class StaticUserWebFilter extends FilterInitializer { static final String DEPRECATED_UGI_KEY = "dfs.web.ugi"; - private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class); + private static final Logger LOG = + LoggerFactory.getLogger(StaticUserWebFilter.class); static class User implements 
Principal { private final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java index d4514c65bd..519fcd74cb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java @@ -22,8 +22,6 @@ import java.io.DataOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.util.bloom.Filter; import org.apache.hadoop.util.bloom.Key; import org.apache.hadoop.util.hash.Hash; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_KEY; @@ -52,7 +52,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class BloomMapFile { - private static final Log LOG = LogFactory.getLog(BloomMapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(BloomMapFile.class); public static final String BLOOM_FILE_NAME = "bloom"; public static final int HASH_COUNT = 5; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java index 705678ec18..a2903f89b9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java @@ -22,11 
+22,10 @@ import java.security.AccessController; import java.security.PrivilegedAction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.google.common.primitives.Longs; import com.google.common.primitives.UnsignedBytes; @@ -36,7 +35,7 @@ * class to be able to compare arrays that start at non-zero offsets. */ abstract class FastByteComparisons { - static final Log LOG = LogFactory.getLog(FastByteComparisons.class); + static final Logger LOG = LoggerFactory.getLogger(FastByteComparisons.class); /** * Lexicographically compare two byte arrays. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index a3bccef398..15744310ea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -32,13 +32,12 @@ import java.util.List; import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Shell; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -49,7 +48,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class IOUtils { - public static final Log LOG = LogFactory.getLog(IOUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(IOUtils.class); /** * Copies from one stream to another. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 908a8931a1..2e214445a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,6 +35,8 @@ import org.apache.hadoop.util.Options; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_KEY; @@ -60,7 +60,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class MapFile { - private static final Log LOG = LogFactory.getLog(MapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(MapFile.class); /** The name of the index file. 
*/ public static final String INDEX_FILE_NAME = "index"; @@ -1002,7 +1002,7 @@ public static void main(String[] args) throws Exception { while (reader.next(key, value)) // copy all entries writer.append(key, value); } finally { - IOUtils.cleanup(LOG, writer, reader); + IOUtils.cleanupWithLogger(LOG, writer, reader); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java index a8c06902b1..2e65f12cc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java @@ -23,8 +23,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.nativeio.NativeIO; @@ -33,6 +31,8 @@ import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Manages a pool of threads which can issue readahead requests on file descriptors. 
@@ -40,7 +40,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class ReadaheadPool { - static final Log LOG = LogFactory.getLog(ReadaheadPool.class); + static final Logger LOG = LoggerFactory.getLogger(ReadaheadPool.class); private static final int POOL_SIZE = 4; private static final int MAX_POOL_SIZE = 16; private static final int CAPACITY = 1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 475d272e94..2cc0e40aac 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -25,7 +25,6 @@ import java.security.MessageDigest; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.*; import org.apache.hadoop.util.Options; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.Options.CreateOpts; @@ -51,6 +50,8 @@ import org.apache.hadoop.util.MergeSort; import org.apache.hadoop.util.PriorityQueue; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -203,7 +204,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class SequenceFile { - private static final Log LOG = LogFactory.getLog(SequenceFile.class); + private static final Logger LOG = LoggerFactory.getLogger(SequenceFile.class); private SequenceFile() {} // no public ctor @@ -1923,7 +1924,7 @@ private void initialize(Path filename, FSDataInputStream in, succeeded = true; } finally { if (!succeeded) { - IOUtils.cleanup(LOG, this.in); + IOUtils.cleanupWithLogger(LOG, this.in); } } } diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index 89f1e428bb..f5d33a1300 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -25,9 +25,10 @@ import org.apache.hadoop.util.StringUtils; -import org.apache.commons.logging.*; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A WritableComparable for strings that uses the UTF8 encoding. * @@ -42,7 +43,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Stable public class UTF8 implements WritableComparable { - private static final Log LOG= LogFactory.getLog(UTF8.class); + private static final Logger LOG= LoggerFactory.getLogger(UTF8.class); private static final DataInputBuffer IBUF = new DataInputBuffer(); private static final ThreadLocal OBUF_FACTORY = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java index 01bffa78a1..f103aad4e8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -33,6 +31,8 @@ import com.google.common.cache.CacheBuilder; import 
com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A global compressor/decompressor pool used to save and reuse @@ -41,7 +41,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class CodecPool { - private static final Log LOG = LogFactory.getLog(CodecPool.class); + private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class); /** * A global compressor pool used to save the expensive diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index 8fff75d01d..3701f2026a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -19,8 +19,6 @@ import java.util.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -28,6 +26,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory that will find the correct codec for a given filename. 
@@ -36,8 +36,8 @@ @InterfaceStability.Evolving public class CompressionCodecFactory { - public static final Log LOG = - LogFactory.getLog(CompressionCodecFactory.class.getName()); + public static final Logger LOG = + LoggerFactory.getLogger(CompressionCodecFactory.class.getName()); private static final ServiceLoader CODEC_PROVIDERS = ServiceLoader.load(CompressionCodec.class); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java index 31196cc728..33f39ef929 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java @@ -22,14 +22,14 @@ import java.io.InputStream; import java.io.OutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.zlib.ZlibDecompressor; import org.apache.hadoop.io.compress.zlib.ZlibFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -37,7 +37,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class DefaultCodec implements Configurable, CompressionCodec, DirectDecompressionCodec { - private static final Log LOG = LogFactory.getLog(DefaultCodec.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultCodec.class); Configuration conf; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java index a973dc9334..d4a9787a4a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java @@ -24,9 +24,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -42,7 +41,8 @@ public class Bzip2Compressor implements Compressor { static final int DEFAULT_BLOCK_SIZE = 9; static final int DEFAULT_WORK_FACTOR = 30; - private static final Log LOG = LogFactory.getLog(Bzip2Compressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Compressor.class); private long stream; private int blockSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java index 3135165e87..96693ad30d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java @@ -23,9 +23,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.io.compress.Decompressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the popular @@ -36,7 +35,8 @@ public class Bzip2Decompressor 
implements Decompressor { private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; - private static final Log LOG = LogFactory.getLog(Bzip2Decompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Decompressor.class); private long stream; private boolean conserveMemory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java index 45f1edda9a..d24b4bf2a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java @@ -18,12 +18,12 @@ package org.apache.hadoop.io.compress.bzip2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -31,7 +31,7 @@ * */ public class Bzip2Factory { - private static final Log LOG = LogFactory.getLog(Bzip2Factory.class); + private static final Logger LOG = LoggerFactory.getLogger(Bzip2Factory.class); private static String bzip2LibraryName = ""; private static boolean nativeBzip2Loaded; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java index ccfae8b3c3..3792c365b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the lz4 compression algorithm. * http://code.google.com/p/lz4/ */ public class Lz4Compressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Compressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 685956cc1b..f26ae8481c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -22,18 +22,18 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the lz4 compression algorithm. 
* http://code.google.com/p/lz4/ */ public class Lz4Decompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Decompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java index 814718d99e..3d386800e4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the snappy compression algorithm. 
* http://code.google.com/p/snappy/ */ public class SnappyCompressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(SnappyCompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyCompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index 8712431673..f31b76c347 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DirectDecompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the snappy compression algorithm. 
* http://code.google.com/p/snappy/ */ public class SnappyDecompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(SnappyDecompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyDecompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java index 509456e834..739788fa5f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java @@ -23,9 +23,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A wrapper around java.util.zip.Deflater to make it conform @@ -34,7 +33,8 @@ */ public class BuiltInZlibDeflater extends Deflater implements Compressor { - private static final Log LOG = LogFactory.getLog(BuiltInZlibDeflater.class); + private static final Logger LOG = + LoggerFactory.getLogger(BuiltInZlibDeflater.class); public BuiltInZlibDeflater(int level, boolean nowrap) { super(level, nowrap); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index 6396fcb7ec..438c8bedad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -25,9 +25,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -37,7 +36,8 @@ */ public class ZlibCompressor implements Compressor { - private static final Log LOG = LogFactory.getLog(ZlibCompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(ZlibCompressor.class); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index 9d8e1d9998..93b3b6db32 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.compress.zlib; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; @@ -29,6 +27,8 @@ import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -36,8 +36,8 @@ * */ public class ZlibFactory { - private static final Log LOG = - LogFactory.getLog(ZlibFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(ZlibFactory.class); private static boolean 
nativeZlibLoaded = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java index 75b8fa546c..69df56a632 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.erasurecode; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.erasurecode.codec.ErasureCodec; @@ -31,6 +29,8 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; @@ -48,7 +48,7 @@ @InterfaceAudience.Private public final class CodecUtil { - private static final Log LOG = LogFactory.getLog(CodecUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(CodecUtil.class); public static final String IO_ERASURECODE_CODEC = "io.erasurecode.codec."; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java index 1c98f3c227..cb462b8d7c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java 
@@ -18,17 +18,17 @@ package org.apache.hadoop.io.erasurecode; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Erasure code native libraries (for now, Intel ISA-L) related utilities. */ public final class ErasureCodeNative { - private static final Log LOG = - LogFactory.getLog(ErasureCodeNative.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(ErasureCodeNative.class.getName()); /** * The reason why ISA-L library is not available, or null if it is available. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java index ce93266574..43d829937e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -43,6 +41,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarLong; import org.apache.hadoop.io.file.tfile.Compression.Algorithm; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Block Compressed file, the underlying physical storage layer for TFile. 
@@ -54,7 +54,7 @@ final class BCFile { // the current version of BCFile impl, increment them (major or minor) made // enough changes static final Version API_VERSION = new Version((short) 1, (short) 0); - static final Log LOG = LogFactory.getLog(BCFile.class); + static final Logger LOG = LoggerFactory.getLogger(BCFile.class); /** * Prevent the instantiation of BCFile objects. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java index 596b7ea63a..f82f4df2e5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java @@ -24,8 +24,6 @@ import java.io.OutputStream; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; @@ -35,6 +33,8 @@ import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY; @@ -44,7 +44,7 @@ * Compression related stuff. */ final class Compression { - static final Log LOG = LogFactory.getLog(Compression.class); + static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * Prevent the instantiation of class. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 56739c6c7e..c63baa550b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -29,8 +29,6 @@ import java.util.ArrayList; import java.util.Comparator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -51,6 +49,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.MemcmpRawComparator; import org.apache.hadoop.io.file.tfile.Utils.Version; import org.apache.hadoop.io.serializer.JavaSerializationComparator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A TFile is a container of key-value pairs. 
Both keys and values are type-less @@ -131,7 +131,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class TFile { - static final Log LOG = LogFactory.getLog(TFile.class); + static final Logger LOG = LoggerFactory.getLogger(TFile.class); private static final String CHUNK_BUF_SIZE_ATTR = "tfile.io.chunk.size"; private static final String FS_INPUT_BUF_SIZE_ATTR = @@ -335,7 +335,7 @@ public void close() throws IOException { writerBCF.close(); } } finally { - IOUtils.cleanup(LOG, blkAppender, writerBCF); + IOUtils.cleanupWithLogger(LOG, blkAppender, writerBCF); blkAppender = null; writerBCF = null; state = State.CLOSED; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java index 84b92eceff..3ef6b27892 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -36,12 +34,14 @@ import org.apache.hadoop.io.file.tfile.BCFile.MetaIndexEntry; import org.apache.hadoop.io.file.tfile.TFile.TFileIndexEntry; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Dumping the information of a TFile. */ class TFileDumper { - static final Log LOG = LogFactory.getLog(TFileDumper.class); + static final Logger LOG = LoggerFactory.getLogger(TFileDumper.class); private TFileDumper() { // namespace object not constructable. 
@@ -290,7 +290,7 @@ static public void dumpInfo(String file, PrintStream out, Configuration conf) } } } finally { - IOUtils.cleanup(LOG, reader, fsdis); + IOUtils.cleanupWithLogger(LOG, reader, fsdis); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index b51c9054bb..84cd42c691 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -40,9 +40,9 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.PerformanceAdvisory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; import com.google.common.annotations.VisibleForTesting; @@ -98,7 +98,7 @@ public static class POSIX { write. 
*/ public static int SYNC_FILE_RANGE_WAIT_AFTER = 4; - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); // Set to true via JNI if possible public static boolean fadvisePossible = false; @@ -634,7 +634,7 @@ public static boolean access(String path, AccessRight desiredAccess) } } - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); private static boolean nativeLoaded = false; @@ -940,10 +940,10 @@ public static void copyFileUnbuffered(File src, File dst) throws IOException { position += transferred; } } finally { - IOUtils.cleanup(LOG, output); - IOUtils.cleanup(LOG, fos); - IOUtils.cleanup(LOG, input); - IOUtils.cleanup(LOG, fis); + IOUtils.cleanupWithLogger(LOG, output); + IOUtils.cleanupWithLogger(LOG, fos); + IOUtils.cleanupWithLogger(LOG, input); + IOUtils.cleanupWithLogger(LOG, fis); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java index 306244a357..412634462a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java @@ -22,10 +22,10 @@ import java.io.FileDescriptor; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory for creating shared file descriptors inside a given directory. 
@@ -45,7 +45,8 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class SharedFileDescriptorFactory { - public static final Log LOG = LogFactory.getLog(SharedFileDescriptorFactory.class); + public static final Logger LOG = + LoggerFactory.getLogger(SharedFileDescriptorFactory.class); private final String prefix; private final String path; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index d6f3e04f0e..fa0cb6e6f0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -32,8 +32,6 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.StandbyException; @@ -41,6 +39,8 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -49,7 +49,7 @@ */ public class RetryPolicies { - public static final Log LOG = LogFactory.getLog(RetryPolicies.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryPolicies.class); /** *

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java index 15a9b54432..1f5acfea1a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java @@ -19,17 +19,17 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.retry.RetryPolicies.MultipleLinearRandomRetry; import org.apache.hadoop.ipc.RemoteException; import com.google.protobuf.ServiceException; import org.apache.hadoop.ipc.RetriableException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RetryUtils { - public static final Log LOG = LogFactory.getLog(RetryUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryUtils.class); /** * Return the default retry policy set in conf. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java index 3f177f8eee..969ca3a784 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java @@ -21,8 +21,6 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -31,6 +29,8 @@ import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization; import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -41,8 +41,8 @@ @InterfaceStability.Evolving public class SerializationFactory extends Configured { - static final Log LOG = - LogFactory.getLog(SerializationFactory.class.getName()); + static final Logger LOG = + LoggerFactory.getLogger(SerializationFactory.class.getName()); private List> serializations = new ArrayList>(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java index 2764788579..d1bd1807b0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java @@ -28,20 +28,21 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstracts queue operations for different blocking queues. */ public class CallQueueManager extends AbstractQueue implements BlockingQueue { - public static final Log LOG = LogFactory.getLog(CallQueueManager.class); + public static final Logger LOG = + LoggerFactory.getLogger(CallQueueManager.class); // Number of checkpoints for empty queue. private static final int CHECKPOINT_NUM = 20; // Interval to check empty queue. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index 6b21c75131..1daf803913 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -21,8 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability; @@ -57,6 +55,8 @@ import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.Span; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import javax.security.sasl.Sasl; @@ -84,7 +84,7 @@ @InterfaceStability.Evolving public class Client implements AutoCloseable { - public static final Log LOG = LogFactory.getLog(Client.class); + public static final Logger LOG = LoggerFactory.getLogger(Client.class); /** A counter for generating call IDs. 
*/ private static final AtomicInteger callIdCounter = new AtomicInteger(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java index 8bcaf05936..20161b8e28 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java @@ -33,11 +33,11 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException; import org.apache.hadoop.metrics2.util.MBeans; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A queue with multiple levels for each priority. @@ -50,7 +50,7 @@ public class FairCallQueue extends AbstractQueue public static final String IPC_CALLQUEUE_PRIORITY_LEVELS_KEY = "faircallqueue.priority-levels"; - public static final Log LOG = LogFactory.getLog(FairCallQueue.class); + public static final Logger LOG = LoggerFactory.getLogger(FairCallQueue.class); /* The queues */ private final ArrayList> queues; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index b3f545868d..639bbadffb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -21,8 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.*; import com.google.protobuf.Descriptors.MethodDescriptor; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -39,6 +37,8 @@ import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import java.io.IOException; @@ -55,7 +55,8 @@ */ @InterfaceStability.Evolving public class ProtobufRpcEngine implements RpcEngine { - public static final Log LOG = LogFactory.getLog(ProtobufRpcEngine.class); + public static final Logger LOG = + LoggerFactory.getLogger(ProtobufRpcEngine.class); private static final ThreadLocal> ASYNC_RETURN_MESSAGE = new ThreadLocal<>(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index e16a8f5e63..8f8eda6ded 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -38,8 +38,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -60,6 +58,8 @@ import org.apache.hadoop.util.Time; import com.google.protobuf.BlockingService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A simple RPC mechanism. 
* @@ -110,7 +110,7 @@ public Writable call(Server server, String protocol, Writable rpcRequest, long receiveTime) throws Exception ; } - static final Log LOG = LogFactory.getLog(RPC.class); + static final Logger LOG = LoggerFactory.getLogger(RPC.class); /** * Get all superInterfaces that extend VersionedProtocol diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java index ee84a04388..e67e8d9cbe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java @@ -24,9 +24,9 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used to registry custom methods to refresh at runtime. 
@@ -34,7 +34,8 @@ */ @InterfaceStability.Unstable public class RefreshRegistry { - public static final Log LOG = LogFactory.getLog(RefreshRegistry.class); + public static final Logger LOG = + LoggerFactory.getLogger(RefreshRegistry.class); // Used to hold singleton instance private static class RegistryHolder { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java index 7b85286b55..6f6ceb5a6c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java @@ -22,8 +22,6 @@ import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.ipc.metrics.RetryCacheMetrics; import org.apache.hadoop.util.LightWeightCache; @@ -32,6 +30,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maintains a cache of non-idempotent requests that have been successfully @@ -44,7 +44,7 @@ */ @InterfaceAudience.Private public class RetryCache { - public static final Log LOG = LogFactory.getLog(RetryCache.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryCache.class); private final RetryCacheMetrics retryCacheMetrics; private static final int MAX_CAPACITY = 16; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index df108b80ac..d0694fb640 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -70,8 +70,6 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -125,6 +123,8 @@ import com.google.protobuf.ByteString; import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An abstract IPC service. IPC calls take a single {@link Writable} as a * parameter, and return a {@link Writable} as their value. A service runs on @@ -293,9 +293,9 @@ public static RpcInvoker getRpcInvoker(RPC.RpcKind rpcKind) { } - public static final Log LOG = LogFactory.getLog(Server.class); - public static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+Server.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(Server.class); + public static final Logger AUDITLOG = + LoggerFactory.getLogger("SecurityLogger."+Server.class.getName()); private static final String AUTH_FAILED_FOR = "Auth failed for "; private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; @@ -1113,7 +1113,7 @@ private synchronized void doRunLoop() { } catch (IOException ex) { LOG.error("Error in Reader", ex); } catch (Throwable re) { - LOG.fatal("Bug in read selector!", re); + LOG.error("Bug in read selector!", re); ExitUtil.terminate(1, "Bug in read selector!"); } } @@ -2692,7 +2692,7 @@ public void run() { } } finally { CurCall.set(null); - IOUtils.cleanup(LOG, traceScope); + IOUtils.cleanupWithLogger(LOG, traceScope); } } LOG.debug(Thread.currentThread().getName() + ": exiting"); @@ -2701,7 +2701,7 @@ public void run() { } @VisibleForTesting - void logException(Log logger, Throwable e, 
Call call) { + void logException(Logger logger, Throwable e, Call call) { if (exceptionsHandler.isSuppressedLog(e.getClass())) { return; // Log nothing. } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java index cfda94734c..d308725c05 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java @@ -20,9 +20,9 @@ import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Determines which queue to start reading from, occasionally drawing from @@ -43,8 +43,8 @@ public class WeightedRoundRobinMultiplexer implements RpcMultiplexer { public static final String IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY = "faircallqueue.multiplexer.weights"; - public static final Log LOG = - LogFactory.getLog(WeightedRoundRobinMultiplexer.class); + public static final Logger LOG = + LoggerFactory.getLogger(WeightedRoundRobinMultiplexer.class); private final int numQueues; // The number of queues under our provisioning diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index 1763c7fb49..fa0726d401 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -28,8 +28,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.*; - import org.apache.hadoop.io.*; 
import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; @@ -43,12 +41,14 @@ import org.apache.hadoop.conf.*; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An RpcEngine implementation for Writable data. */ @InterfaceStability.Evolving @Deprecated public class WritableRpcEngine implements RpcEngine { - private static final Log LOG = LogFactory.getLog(RPC.class); + private static final Logger LOG = LoggerFactory.getLogger(RPC.class); //writableRpcVersion should be updated if there is a change //in format of the rpc messages. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java index a853d642e2..fc09e0afeb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RetryCache; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; @@ -26,6 +24,8 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableCounterLong; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RetryCache-related statistics @@ -35,7 +35,7 @@ @Metrics(about="Aggregate RetryCache metrics", context="rpc") public class RetryCacheMetrics { - static final Log LOG = LogFactory.getLog(RetryCacheMetrics.class); + static final Logger LOG = 
LoggerFactory.getLogger(RetryCacheMetrics.class); final MetricsRegistry registry; final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java index 8b7e995e47..6ed57ec6d9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining RPC method related statistics @@ -37,7 +37,7 @@ public class RpcDetailedMetrics { @Metric MutableRatesWithAggregation rates; @Metric MutableRatesWithAggregation deferredRpcRates; - static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcDetailedMetrics.class); final MetricsRegistry registry; final String name; @@ -45,7 +45,7 @@ public class RpcDetailedMetrics { name = "RpcDetailedActivityForPort"+ port; registry = new MetricsRegistry("rpcdetailed") .tag("port", "RPC port", String.valueOf(port)); - LOG.debug(registry.info()); + LOG.debug(registry.info().toString()); } public String name() { return name; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java index 8ce1379339..d53d7d3fb5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.classification.InterfaceAudience; @@ -31,6 +29,8 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong; import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.lib.MutableRate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RPC statistics @@ -40,7 +40,7 @@ @Metrics(about="Aggregate RPC metrics", context="rpc") public class RpcMetrics { - static final Log LOG = LogFactory.getLog(RpcMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcMetrics.class); final Server server; final MetricsRegistry registry; final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index a8d9fa7bac..33af448594 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -19,9 +19,9 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.http.HttpServer2; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; @@ -116,7 +116,8 @@ * */ public class JMXJsonServlet extends HttpServlet { - private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class); + private static final Logger LOG = + LoggerFactory.getLogger(JMXJsonServlet.class); static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; static final String ACCESS_CONTROL_ALLOW_ORIGIN = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java index a76acac193..528211913d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java @@ -106,7 +106,7 @@ MBeanInfo get() { } ++curRecNo; } - MetricsSystemImpl.LOG.debug(attrs); + MetricsSystemImpl.LOG.debug(attrs.toString()); MBeanAttributeInfo[] attrsArray = new MBeanAttributeInfo[attrs.size()]; return new MBeanInfo(name, description, attrs.toArray(attrsArray), null, null, null); // no ops/ctors/notifications diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java index 8d834d2aff..ac4a24eb3b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java @@ -41,18 +41,18 @@ import org.apache.commons.configuration2.builder.fluent.Parameters; import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler; import org.apache.commons.configuration2.ex.ConfigurationException; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsPlugin; import org.apache.hadoop.metrics2.filter.GlobFilter; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Metrics configuration for MetricsSystemImpl */ class MetricsConfig extends SubsetConfiguration { - static final Log LOG = LogFactory.getLog(MetricsConfig.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsConfig.class); static final String DEFAULT_FILE_NAME = "hadoop-metrics2.properties"; static final String PREFIX_DEFAULT = "*."; @@ -121,7 +121,7 @@ static MetricsConfig loadFirst(String prefix, String... fileNames) { LOG.info("loaded properties from "+ fname); LOG.debug(toString(cf)); MetricsConfig mc = new MetricsConfig(cf, prefix); - LOG.debug(mc); + LOG.debug(mc.toString()); return mc; } catch (ConfigurationException e) { // Commons Configuration defines the message text when file not found diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java index b2f3c4aa21..1199ebd6fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java @@ -24,8 +24,6 @@ import static com.google.common.base.Preconditions.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.metrics2.lib.MutableGaugeInt; import org.apache.hadoop.metrics2.lib.MetricsRegistry; @@ -36,13 +34,16 @@ import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsSink; import 
org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An adapter class for metrics sink and associated filters */ class MetricsSinkAdapter implements SinkQueue.Consumer { - private final Log LOG = LogFactory.getLog(MetricsSinkAdapter.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSinkAdapter.class); private final String name, description, context; private final MetricsSink sink; private final MetricsFilter sourceFilter, recordFilter, metricFilter; @@ -207,7 +208,7 @@ void stop() { stopping = true; sinkThread.interrupt(); if (sink instanceof Closeable) { - IOUtils.cleanup(LOG, (Closeable)sink); + IOUtils.cleanupWithLogger(LOG, (Closeable)sink); } try { sinkThread.join(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java index 3406aceb0a..f12ec67dd6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java @@ -33,8 +33,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsFilter; @@ -43,6 +41,8 @@ import static org.apache.hadoop.metrics2.impl.MetricsConfig.*; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.metrics2.util.Contracts.*; @@ -51,7 +51,8 @@ */ class MetricsSourceAdapter implements DynamicMBean { - private static final Log LOG = 
LogFactory.getLog(MetricsSourceAdapter.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSourceAdapter.class); private final String prefix, name; private final MetricsSource source; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index 24173f5d01..22481223cb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -36,8 +36,6 @@ import static com.google.common.base.Preconditions.*; import org.apache.commons.configuration2.PropertiesConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.math3.util.ArithmeticUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsInfo; @@ -62,6 +60,8 @@ import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A base class for metrics system singletons @@ -70,7 +70,7 @@ @Metrics(context="metricssystem") public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { - static final Log LOG = LogFactory.getLog(MetricsSystemImpl.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsSystemImpl.class); static final String MS_NAME = "MetricsSystem"; static final String MS_STATS_NAME = MS_NAME +",sub=Stats"; static final String MS_STATS_DESC = "Metrics system metrics"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java index 
51b2e6613b..3d7a90e7ee 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java @@ -22,20 +22,21 @@ import static com.google.common.base.Preconditions.*; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.annotation.Metric; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.util.Contracts.*; /** * Metric generated from a method, mostly used by annotation */ class MethodMetric extends MutableMetric { - private static final Log LOG = LogFactory.getLog(MethodMetric.class); + private static final Logger LOG = LoggerFactory.getLogger(MethodMetric.class); private final Object obj; private final Method method; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java index 0669297495..1fcede464d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java @@ -24,8 +24,6 @@ import static com.google.common.base.Preconditions.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; @@ -34,6 +32,8 @@ import org.apache.hadoop.metrics2.annotation.Metric; import 
org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to build {@link MetricsSource} object from annotations. @@ -49,7 +49,8 @@ */ @InterfaceAudience.Private public class MetricsSourceBuilder { - private static final Log LOG = LogFactory.getLog(MetricsSourceBuilder.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSourceBuilder.class); private final Object source; private final MutableMetricsFactory factory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java index 8b4b083436..b2042e7a12 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java @@ -22,19 +22,20 @@ import java.lang.reflect.Method; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Evolving public class MutableMetricsFactory { - private static final Log LOG = LogFactory.getLog(MutableMetricsFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(MutableMetricsFactory.class); MutableMetric newForField(Field field, Metric annotation, MetricsRegistry registry) { diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java index 1074e87255..994eb13e08 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java @@ -24,12 +24,11 @@ import static com.google.common.base.Preconditions.*; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.MetricsRecordBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to manage a group of mutable rate metrics @@ -43,7 +42,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MutableRates extends MutableMetric { - static final Log LOG = LogFactory.getLog(MutableRates.class); + static final Logger LOG = LoggerFactory.getLogger(MutableRates.class); private final MetricsRegistry registry; private final Set> protocolCache = Sets.newHashSet(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java index 9827ca77e8..26a15063bb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java @@ -27,12 +27,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.ConcurrentMap; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.util.SampleStat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -48,7 +48,8 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MutableRatesWithAggregation extends MutableMetric { - static final Log LOG = LogFactory.getLog(MutableRatesWithAggregation.class); + static final Logger LOG = + LoggerFactory.getLogger(MutableRatesWithAggregation.class); private final Map globalMetrics = new ConcurrentHashMap<>(); private final Set> protocolCache = Sets.newHashSet(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java index 5c58d52e42..de4c14d7af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java @@ -19,8 +19,6 @@ package org.apache.hadoop.metrics2.sink; import org.apache.commons.configuration2.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.AbstractMetric; @@ -28,6 +26,8 @@ import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.metrics2.MetricsTag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; @@ -42,7 +42,8 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class 
GraphiteSink implements MetricsSink, Closeable { - private static final Log LOG = LogFactory.getLog(GraphiteSink.class); + private static final Logger LOG = + LoggerFactory.getLogger(GraphiteSink.class); private static final String SERVER_HOST_KEY = "server_host"; private static final String SERVER_PORT_KEY = "server_port"; private static final String METRICS_PREFIX = "metrics_prefix"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java index 4125461881..804e90330f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java @@ -26,11 +26,11 @@ import java.util.Map; import org.apache.commons.configuration2.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.metrics2.util.Servers; import org.apache.hadoop.net.DNS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This the base class for Ganglia sink classes using metrics2. 
Lot of the code @@ -41,7 +41,7 @@ */ public abstract class AbstractGangliaSink implements MetricsSink { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); /* * Output of "gmetric --help" showing allowable values @@ -127,7 +127,7 @@ public void init(SubsetConfiguration conf) { conf.getString("dfs.datanode.dns.interface", "default"), conf.getString("dfs.datanode.dns.nameserver", "default")); } catch (UnknownHostException uhe) { - LOG.error(uhe); + LOG.error("Unable to resolve host name", uhe); hostName = "UNKNOWN.example.com"; } } @@ -155,7 +155,7 @@ public void init(SubsetConfiguration conf) { datagramSocket = new DatagramSocket(); } } catch (IOException e) { - LOG.error(e); + LOG.error("Error creating datagram socket", e); } // see if sparseMetrics is supported. Default is false diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java index ffccfb6cdd..3e8314ee88 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java @@ -28,8 +28,6 @@ import org.apache.commons.configuration2.SubsetConfiguration; import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsException; @@ -38,6 +36,8 @@ import org.apache.hadoop.metrics2.impl.MsInfo; import org.apache.hadoop.metrics2.util.MetricsCache; import org.apache.hadoop.metrics2.util.MetricsCache.Record; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This code supports
Ganglia 3.0 @@ -45,7 +45,7 @@ */ public class GangliaSink30 extends AbstractGangliaSink { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); private static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix."; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java index 1d92177cd0..5aebff8c03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java @@ -18,10 +18,11 @@ package org.apache.hadoop.metrics2.sink.ganglia; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * This code supports Ganglia 3.1 @@ -29,7 +30,7 @@ */ public class GangliaSink31 extends GangliaSink30 { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); /** * The method sends metrics to Ganglia servers. 
The method has been taken from diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java index 7ec07aad7d..ded49d6a6e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java @@ -25,11 +25,11 @@ import javax.management.MBeanServer; import javax.management.ObjectName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This util class provides a method to register an MBean using @@ -39,7 +39,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class MBeans { - private static final Log LOG = LogFactory.getLog(MBeans.class); + private static final Logger LOG = LoggerFactory.getLogger(MBeans.class); private static final String DOMAIN_PREFIX = "Hadoop:"; private static final String SERVICE_PREFIX = "service="; private static final String NAME_PREFIX = "name="; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java index cfd126c39a..6cfbc39f89 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java @@ -19,13 +19,13 @@ package org.apache.hadoop.metrics2.util; import com.google.common.collect.Maps; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsTag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.LinkedHashMap; @@ -39,7 +39,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MetricsCache { - static final Log LOG = LogFactory.getLog(MetricsCache.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsCache.class); static final int MAX_RECS_PER_NAME_DEFAULT = 1000; private final Map map = Maps.newHashMap(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java index a6dc8e3d37..81041c110a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java @@ -20,10 +20,10 @@ import com.google.common.net.InetAddresses; import com.sun.istack.Nullable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.NetworkInterface; @@ -52,7 +52,7 @@ @InterfaceStability.Unstable public class DNS { - private static final Log LOG = LogFactory.getLog(DNS.class); + private static final Logger LOG = LoggerFactory.getLogger(DNS.class); /** * The cached hostname -initially null. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java index 40501073fd..85773364c4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java @@ -44,8 +44,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import org.apache.commons.net.util.SubnetUtils.SubnetInfo; import org.apache.hadoop.classification.InterfaceAudience; @@ -58,11 +56,13 @@ import org.apache.hadoop.util.ReflectionUtils; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Unstable public class NetUtils { - private static final Log LOG = LogFactory.getLog(NetUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(NetUtils.class); private static Map hostToResolved = new HashMap(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java index 3dcb61090d..02b44a54fe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java @@ -21,13 +21,13 @@ import java.util.*; import java.io.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class implements the {@link DNSToSwitchMapping} interface using a @@ -145,8 +145,8 @@ protected static class RawScriptBasedMapping extends AbstractDNSToSwitchMapping { private String scriptName; private int maxArgs; //max hostnames per call of the script - private static final Log LOG = - LogFactory.getLog(ScriptBasedMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(ScriptBasedMapping.class); /** * Set the configuration and extract the configuration parameters of interest diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java index b50f7e936b..f489581843 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java @@ -31,9 +31,9 @@ import java.util.Iterator; import java.util.LinkedList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This supports input and output streams for a socket channels. @@ -42,7 +42,7 @@ abstract class SocketIOWithTimeout { // This is intentionally package private. 
- static final Log LOG = LogFactory.getLog(SocketIOWithTimeout.class); + static final Logger LOG = LoggerFactory.getLogger(SocketIOWithTimeout.class); private SelectableChannel channel; private long timeout; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 362cf07b43..ead9a7430b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -29,12 +29,12 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -56,7 +56,7 @@ @InterfaceStability.Evolving public class TableMapping extends CachedDNSToSwitchMapping { - private static final Log LOG = LogFactory.getLog(TableMapping.class); + private static final Logger LOG = LoggerFactory.getLogger(TableMapping.class); public TableMapping() { super(new RawTableMapping()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java index 8379fd1a4b..ac118c0517 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java @@ -29,12 +29,12 @@ import java.nio.ByteBuffer; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.CloseableReferenceCount; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The implementation of UNIX domain sockets in Java. 
@@ -60,7 +60,7 @@ public class DomainSocket implements Closeable { } } - static Log LOG = LogFactory.getLog(DomainSocket.class); + static final Logger LOG = LoggerFactory.getLogger(DomainSocket.class); /** * True only if we should validate the paths used in @@ -459,13 +459,13 @@ public int recvFileInputStreams(FileInputStream[] streams, byte buf[], try { closeFileDescriptor0(descriptors[i]); } catch (Throwable t) { - LOG.warn(t); + LOG.warn("Error closing file descriptor", t); } } else if (streams[i] != null) { try { streams[i].close(); } catch (Throwable t) { - LOG.warn(t); + LOG.warn("Error closing stream", t); } finally { streams[i] = null; } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java index e1bcf7e20c..c7af97f60a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java @@ -33,13 +33,13 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The DomainSocketWatcher watches a set of domain sockets to see when they @@ -68,7 +68,7 @@ public final class DomainSocketWatcher implements Closeable { } } - static Log LOG = LogFactory.getLog(DomainSocketWatcher.class); + static final Logger LOG = LoggerFactory.getLogger(DomainSocketWatcher.class); /** * The reason why DomainSocketWatcher is not available, or null if it is @@ -306,7 +306,7 @@ public void add(DomainSocket sock, 
Handler handler) { try { if (closed) { handler.handle(sock); - IOUtils.cleanup(LOG, sock); + IOUtils.cleanupWithLogger(LOG, sock); return; } Entry entry = new Entry(sock, handler); @@ -411,7 +411,7 @@ private boolean sendCallback(String caller, TreeMap entries, this + ": file descriptor " + sock.fd + " was closed while " + "still in the poll(2) loop."); } - IOUtils.cleanup(LOG, sock); + IOUtils.cleanupWithLogger(LOG, sock); fdSet.remove(fd); return true; } else { @@ -524,7 +524,7 @@ public void run() { Entry entry = iter.next(); entry.getDomainSocket().refCount.unreference(); entry.getHandler().handle(entry.getDomainSocket()); - IOUtils.cleanup(LOG, entry.getDomainSocket()); + IOUtils.cleanupWithLogger(LOG, entry.getDomainSocket()); iter.remove(); } // Items in toRemove might not be really removed, handle it here diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationWithProxyUserFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationWithProxyUserFilter.java index 751cf02ca5..c97f8ad814 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationWithProxyUserFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationWithProxyUserFilter.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.security; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.FilterChain; import javax.servlet.ServletException; @@ -42,8 +42,8 @@ */ public class AuthenticationWithProxyUserFilter extends 
AuthenticationFilter { - public static final Log LOG = - LogFactory.getLog(AuthenticationWithProxyUserFilter.class); + public static final Logger LOG = + LoggerFactory.getLogger(AuthenticationWithProxyUserFilter.class); /** * Constant used in URL's query string to perform a proxy user request, the diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java index ffa7e2bdbb..b8cfdf71b2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java @@ -25,13 +25,13 @@ import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link GroupMappingServiceProvider} which @@ -48,7 +48,8 @@ public class CompositeGroupsMapping public static final String MAPPING_PROVIDERS_COMBINED_CONFIG_KEY = MAPPING_PROVIDERS_CONFIG_KEY + ".combined"; public static final String MAPPING_PROVIDER_CONFIG_PREFIX = GROUP_MAPPING_CONFIG_PREFIX + ".provider"; - private static final Log LOG = LogFactory.getLog(CompositeGroupsMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeGroupsMapping.class); private List providersList = new ArrayList(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index 1283d8ffa3..4d5898181d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -36,8 +36,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -52,6 +50,8 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto; import org.apache.hadoop.security.proto.SecurityProtos.CredentialsProto; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class that provides the facilities of reading and writing @@ -60,7 +60,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class Credentials implements Writable { - private static final Log LOG = LogFactory.getLog(Credentials.class); + private static final Logger LOG = LoggerFactory.getLogger(Credentials.class); private Map secretKeysMap = new HashMap(); private Map> tokenMap = @@ -188,7 +188,7 @@ public static Credentials readTokenStorageFile(Path filename, } catch(IOException ioe) { throw new IOException("Exception reading " + filename, ioe); } finally { - IOUtils.cleanup(LOG, in); + IOUtils.cleanupWithLogger(LOG, in); } } @@ -211,7 +211,7 @@ public static Credentials readTokenStorageFile(File filename, } catch(IOException ioe) { throw new IOException("Exception reading " + filename, ioe); } finally { - IOUtils.cleanup(LOG, in); + IOUtils.cleanupWithLogger(LOG, in); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java index 596259a726..ad0986511d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java @@ -59,9 +59,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Timer; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A user-to-groups mapping service. @@ -74,7 +73,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class Groups { - private static final Log LOG = LogFactory.getLog(Groups.class); + private static final Logger LOG = LoggerFactory.getLogger(Groups.class); private final GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java index f9c1816664..47b5a58e00 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java @@ -21,20 +21,20 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; import org.apache.hadoop.security.http.CrossOriginFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HttpCrossOriginFilterInitializer extends FilterInitializer { public static final 
String PREFIX = "hadoop.http.cross-origin."; public static final String ENABLED_SUFFIX = "enabled"; - private static final Log LOG = - LogFactory.getLog(HttpCrossOriginFilterInitializer.class); + private static final Logger LOG = + LoggerFactory.getLogger(HttpCrossOriginFilterInitializer.class); @Override public void initFilter(FilterContainer container, Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java index d397e44001..a0f6142a3c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java @@ -25,9 +25,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A JNI-based implementation of {@link GroupMappingServiceProvider} @@ -38,8 +38,8 @@ @InterfaceStability.Evolving public class JniBasedUnixGroupsMapping implements GroupMappingServiceProvider { - private static final Log LOG = - LogFactory.getLog(JniBasedUnixGroupsMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(JniBasedUnixGroupsMapping.class); static { if (!NativeCodeLoader.isNativeCodeLoaded()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java index 40333fcc5d..f1644305d9 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java @@ -21,16 +21,16 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.PerformanceAdvisory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class JniBasedUnixGroupsMappingWithFallback implements GroupMappingServiceProvider { - private static final Log LOG = LogFactory - .getLog(JniBasedUnixGroupsMappingWithFallback.class); + private static final Logger LOG = LoggerFactory + .getLogger(JniBasedUnixGroupsMappingWithFallback.class); private GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java index ff4ab989e7..9ba55e436f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java @@ -26,11 +26,11 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.security.NetgroupCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A JNI-based implementation of {@link GroupMappingServiceProvider} @@ -42,7 +42,7 @@ public class JniBasedUnixGroupsNetgroupMapping extends JniBasedUnixGroupsMapping { - private 
static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JniBasedUnixGroupsNetgroupMapping.class); native String[] getUsersForNetgroupJNI(String group); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java index 7d77c1097b..fcc47cb796 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java @@ -21,15 +21,15 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class JniBasedUnixGroupsNetgroupMappingWithFallback implements GroupMappingServiceProvider { - private static final Log LOG = LogFactory - .getLog(JniBasedUnixGroupsNetgroupMappingWithFallback.class); + private static final Logger LOG = LoggerFactory + .getLogger(JniBasedUnixGroupsNetgroupMappingWithFallback.class); private GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java index 1a184e842b..babfa3809b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java @@ -41,12 +41,12 @@ import javax.naming.ldap.LdapName; import javax.naming.ldap.Rdn; -import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link GroupMappingServiceProvider} which @@ -211,7 +211,8 @@ public class LdapGroupsMapping LDAP_CONFIG_PREFIX + ".read.timeout.ms"; public static final int READ_TIMEOUT_DEFAULT = 60 * 1000; // 60 seconds - private static final Log LOG = LogFactory.getLog(LdapGroupsMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(LdapGroupsMapping.class); static final SearchControls SEARCH_CONTROLS = new SearchControls(); static { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java index 013e56c9e6..8dcf8b95e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java @@ -26,14 +26,14 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.alias.CredentialProviderFactory; import org.apache.hadoop.security.alias.JavaKeyStoreProvider; import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility methods for both key and credential provider APIs. 
@@ -57,7 +57,8 @@ public final class ProviderUtils { "Please review the documentation regarding provider passwords in\n" + "the keystore passwords section of the Credential Provider API\n"; - private static final Log LOG = LogFactory.getLog(ProviderUtils.class); + private static final Logger LOG = + LoggerFactory.getLogger(ProviderUtils.class); /** * Hidden ctor to ensure that this utility class isn't diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java index a3d66b977c..a91a90ac7c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java @@ -30,10 +30,10 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A SaslInputStream is composed of an InputStream and a SaslServer (or @@ -45,7 +45,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class SaslInputStream extends InputStream implements ReadableByteChannel { - public static final Log LOG = LogFactory.getLog(SaslInputStream.class); + public static final Logger LOG = + LoggerFactory.getLogger(SaslInputStream.class); private final DataInputStream inStream; /** Should we wrap the communication channel? 
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index 388f1b298a..11714b15bd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -45,8 +45,6 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslClient; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -75,6 +73,9 @@ import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.ByteString; import com.google.re2j.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * A utility class that encapsulates SASL logic for RPC client */ @@ -82,7 +83,7 @@ @InterfaceStability.Evolving public class SaslRpcClient { // This log is public as it is referenced in tests - public static final Log LOG = LogFactory.getLog(SaslRpcClient.class); + public static final Logger LOG = LoggerFactory.getLogger(SaslRpcClient.class); private final UserGroupInformation ugi; private final Class protocol; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index a6fbb6dcef..643af79e4b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -45,8 +45,6 @@ import javax.security.sasl.SaslServerFactory; import org.apache.commons.codec.binary.Base64; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -57,6 +55,8 @@ import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.TokenIdentifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A utility class for dealing with SASL on RPC server @@ -64,7 +64,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class SaslRpcServer { - public static final Log LOG = LogFactory.getLog(SaslRpcServer.class); + public static final Logger LOG = LoggerFactory.getLogger(SaslRpcServer.class); public static final String SASL_DEFAULT_REALM = "default"; private static SaslServerFactory saslFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index b7d1ec0482..20e8754492 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -36,8 +36,6 @@ import javax.security.auth.kerberos.KerberosPrincipal; import javax.security.auth.kerberos.KerberosTicket; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -51,7 +49,8 @@ import org.apache.hadoop.util.StopWatch; import org.apache.hadoop.util.StringUtils; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; //this will need to be replaced someday when there is a suitable 
replacement import sun.net.dns.ResolverConfiguration; import sun.net.util.IPAddressUtil; @@ -64,7 +63,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public final class SecurityUtil { - public static final Log LOG = LogFactory.getLog(SecurityUtil.class); + public static final Logger LOG = LoggerFactory.getLogger(SecurityUtil.class); public static final String HOSTNAME_PATTERN = "_HOST"; public static final String FAILED_TO_GET_UGI_MSG_HEADER = "Failed to obtain user group information:"; @@ -473,7 +472,7 @@ public static T doAsLoginUserOrFatal(PrivilegedAction action) { try { ugi = UserGroupInformation.getLoginUser(); } catch (IOException e) { - LOG.fatal("Exception while getting login user", e); + LOG.error("Exception while getting login user", e); e.printStackTrace(); Runtime.getRuntime().exit(-1); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java index efc1fd6e85..2ed96770a8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java @@ -29,14 +29,14 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple shell-based implementation of {@link IdMappingServiceProvider} @@ -62,8 +62,8 @@ */ public class ShellBasedIdMapping implements IdMappingServiceProvider { - private static final Log LOG = - LogFactory.getLog(ShellBasedIdMapping.class); + 
private static final Logger LOG = + LoggerFactory.getLogger(ShellBasedIdMapping.class); private final static String OS = System.getProperty("os.name"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java index 4aa4e9f55b..eff6985471 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java @@ -23,12 +23,12 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.security.NetgroupCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple shell-based implementation of {@link GroupMappingServiceProvider} @@ -40,8 +40,8 @@ public class ShellBasedUnixGroupsNetgroupMapping extends ShellBasedUnixGroupsMapping { - private static final Log LOG = - LogFactory.getLog(ShellBasedUnixGroupsNetgroupMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShellBasedUnixGroupsNetgroupMapping.class); /** * Get unix groups (parent) and netgroups for given user diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java index 8d4df64299..a64c4de7b6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java @@ -24,13 +24,13 @@ import javax.security.sasl.Sasl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.SaslPropertiesResolver; import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection; import org.apache.hadoop.util.CombinedIPWhiteList; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -54,7 +54,8 @@ * */ public class WhitelistBasedResolver extends SaslPropertiesResolver { - public static final Log LOG = LogFactory.getLog(WhitelistBasedResolver.class); + public static final Logger LOG = + LoggerFactory.getLogger(WhitelistBasedResolver.class); private static final String FIXEDWHITELIST_DEFAULT_LOCATION = "/etc/hadoop/fixedwhitelist"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java index 8e4a0a5f81..df783f16ed 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java @@ -18,8 +18,6 @@ package org.apache.hadoop.security.alias; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -27,6 +25,8 @@ import org.apache.hadoop.security.ProviderUtils; import com.google.common.base.Charsets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.crypto.spec.SecretKeySpec; import 
java.io.IOException; @@ -60,7 +60,7 @@ */ @InterfaceAudience.Private public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider { - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( AbstractJavaKeyStoreProvider.class); public static final String CREDENTIAL_PASSWORD_ENV_VAR = "HADOOP_CREDSTORE_PASSWORD"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java index 9da95dc791..4c47348fa5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; @@ -36,6 +34,8 @@ import org.apache.hadoop.util.MachineList; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An authorization manager which handles service-level authorization @@ -69,8 +69,9 @@ public class ServiceAuthorizationManager { public static final String SERVICE_AUTHORIZATION_CONFIG = "hadoop.security.authorization"; - public static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName()); + public static final Logger AUDITLOG = + LoggerFactory.getLogger( + "SecurityLogger." 
+ ServiceAuthorizationManager.class.getName()); private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for "; private static final String AUTHZ_FAILED_FOR = "Authorization failed for "; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java index ea7876224b..58d50cf972 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java @@ -35,14 +35,15 @@ import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CrossOriginFilter implements Filter { - private static final Log LOG = LogFactory.getLog(CrossOriginFilter.class); + private static final Logger LOG = + LoggerFactory.getLogger(CrossOriginFilter.class); // HTTP CORS Request Headers static final String ORIGIN = "Origin"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index af8e32c46c..b0df8f0121 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -18,12 +18,12 @@ package org.apache.hadoop.security.ssl; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; @@ -47,8 +47,8 @@ @InterfaceStability.Evolving public class FileBasedKeyStoresFactory implements KeyStoresFactory { - private static final Log LOG = - LogFactory.getLog(FileBasedKeyStoresFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(FileBasedKeyStoresFactory.class); public static final String SSL_KEYSTORE_LOCATION_TPL_KEY = "ssl.{0}.keystore.location"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java index 2d3afea69c..88b045e2f9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java @@ -18,12 +18,12 @@ package org.apache.hadoop.security.ssl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; @@ -47,7 +47,8 @@ public final class ReloadingX509TrustManager implements X509TrustManager, Runnable { @VisibleForTesting - static final Log LOG = LogFactory.getLog(ReloadingX509TrustManager.class); + static final Logger LOG = + 
LoggerFactory.getLogger(ReloadingX509TrustManager.class); @VisibleForTesting static final String RELOAD_ERROR_MESSAGE = "Could not load truststore (keep using existing one) : "; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java index 1396054343..d128cc9bd1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtFileOperations.java @@ -27,20 +27,21 @@ import java.util.ServiceLoader; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DtFileOperations is a collection of delegation token file operations. */ public final class DtFileOperations { - private static final Log LOG = LogFactory.getLog(DtFileOperations.class); + private static final Logger LOG = + LoggerFactory.getLogger(DtFileOperations.class); /** No public constructor as per checkstyle. 
*/ private DtFileOperations() { } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java index f00e6fd2dc..88db34fb84 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java @@ -22,19 +22,19 @@ import java.io.IOException; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.tools.CommandShell; import org.apache.hadoop.util.ToolRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DtUtilShell is a set of command line token file management operations. 
*/ public class DtUtilShell extends CommandShell { - private static final Log LOG = LogFactory.getLog(DtUtilShell.class); + private static final Logger LOG = LoggerFactory.getLogger(DtUtilShell.class); private static final String FORMAT_SUBSTRING = "[-format (" + DtFileOperations.FORMAT_JAVA + "|" + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index 99cc8c7422..33cb9ec98f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -23,14 +23,14 @@ import com.google.common.primitives.Bytes; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.*; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.*; import java.util.Arrays; @@ -44,7 +44,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class Token implements Writable { - public static final Log LOG = LogFactory.getLog(Token.class); + public static final Logger LOG = LoggerFactory.getLogger(Token.class); private static Map> tokenKindMap; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index cf88745032..f06681b9f5 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -30,8 +30,6 @@ import javax.crypto.SecretKey; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.Text; @@ -43,6 +41,8 @@ import org.apache.hadoop.util.Time; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Public @InterfaceStability.Evolving @@ -50,8 +50,8 @@ class AbstractDelegationTokenSecretManager extends SecretManager { - private static final Log LOG = LogFactory - .getLog(AbstractDelegationTokenSecretManager.class); + private static final Logger LOG = LoggerFactory + .getLogger(AbstractDelegationTokenSecretManager.class); private String formatTokenId(TokenIdent id) { return "(" + id + ")"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java index 132768386a..2a1140f751 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java @@ -25,13 +25,13 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; import 
com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is the base implementation class for services. @@ -40,7 +40,8 @@ @Evolving public abstract class AbstractService implements Service { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractService.class); /** * Service name. @@ -258,7 +259,7 @@ public final void close() throws IOException { */ protected final void noteFailure(Exception exception) { if (LOG.isDebugEnabled()) { - LOG.debug("noteFailure " + exception, null); + LOG.debug("noteFailure " + exception, (Throwable) null); } if (exception == null) { //make sure failure logic doesn't itself cause problems diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java index 51cb4a336d..a5e8c895c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java @@ -21,11 +21,11 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Composition of services. 
@@ -34,7 +34,8 @@ @Evolving public class CompositeService extends AbstractService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeService.class); /** * Policy on shutdown: attempt to close everything (purest) or diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java index 700999d625..c978fecf2d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java @@ -18,10 +18,10 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a state change listener that logs events at INFO level @@ -30,15 +30,16 @@ @Evolving public class LoggingStateChangeListener implements ServiceStateChangeListener { - private static final Log LOG = LogFactory.getLog(LoggingStateChangeListener.class); + private static final Logger LOG = + LoggerFactory.getLogger(LoggingStateChangeListener.class); - private final Log log; + private final Logger log; /** * Log events to the given log * @param log destination for events */ - public LoggingStateChangeListener(Log log) { + public LoggingStateChangeListener(Logger log) { //force an NPE if a null log came in log.isDebugEnabled(); this.log = log; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java index a0a77ceb30..e7683a2cb2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java @@ -22,10 +22,10 @@ import java.util.List; import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class contains a set of methods to work with services, especially @@ -34,7 +34,8 @@ @Public @Evolving public final class ServiceOperations { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractService.class); private ServiceOperations() { } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java index 75601adb19..658e4d326b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java @@ -19,13 +19,13 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair; import org.apache.htrace.core.SpanReceiver; import org.apache.htrace.core.TracerPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides functions 
for managing the tracer configuration at @@ -33,8 +33,8 @@ */ @InterfaceAudience.Private public class TracerConfigurationManager implements TraceAdminProtocol { - private static final Log LOG = - LogFactory.getLog(TracerConfigurationManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(TracerConfigurationManager.class); private final String confPrefix; private final Configuration conf; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java index 2f46e1fee5..972bbff4cf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java @@ -29,12 +29,12 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link URLClassLoader} for application isolation. 
Classes from the @@ -56,8 +56,8 @@ public class ApplicationClassLoader extends URLClassLoader { private static final String SYSTEM_CLASSES_DEFAULT_KEY = "system.classes.default"; - private static final Log LOG = - LogFactory.getLog(ApplicationClassLoader.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(ApplicationClassLoader.class.getName()); static { try (InputStream is = ApplicationClassLoader.class.getClassLoader() @@ -179,7 +179,7 @@ protected synchronized Class loadClass(String name, boolean resolve) } } catch (ClassNotFoundException e) { if (LOG.isDebugEnabled()) { - LOG.debug(e); + LOG.debug(e.toString()); } ex = e; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index df151666d4..8e48cb955a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -26,10 +26,10 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * This class is a container of multiple thread pools, each for a volume, @@ -43,7 +43,8 @@ @InterfaceStability.Unstable public class AsyncDiskService { - public static final Log LOG = LogFactory.getLog(AsyncDiskService.class); + public static final Logger LOG = + LoggerFactory.getLogger(AsyncDiskService.class); // ThreadPool core pool size private static final int CORE_THREADS_PER_VOLUME = 1; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java index d12c4c11d5..6d42dc0362 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CombinedIPWhiteList implements IPList { - public static final Log LOG = LogFactory.getLog(CombinedIPWhiteList.class); + public static final Logger LOG = + LoggerFactory.getLogger(CombinedIPWhiteList.class); private static final String LOCALHOST_IP = "127.0.0.1"; private final IPList[] networkLists; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java index 6ee1212df3..146f65c6cc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; @@ -29,9 +32,6 @@ import java.util.HashSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - /** * FileBasedIPList loads a list of subnets in CIDR format and ip addresses from * a file. 
@@ -43,7 +43,8 @@ */ public class FileBasedIPList implements IPList { - private static final Log LOG = LogFactory.getLog(FileBasedIPList.class); + private static final Logger LOG = + LoggerFactory.getLogger(FileBasedIPList.class); private final String fileName; private final MachineList addressList; @@ -107,7 +108,7 @@ private static String[] readLines(String fileName) throws IOException { } } } catch (IOException ioe) { - LOG.error(ioe); + LOG.error(ioe.toString()); throw ioe; } return null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java index e4a8d0f5b5..fbc1418288 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java @@ -19,9 +19,9 @@ import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link GSet} is set, @@ -35,7 +35,7 @@ */ @InterfaceAudience.Private public interface GSet extends Iterable { - static final Log LOG = LogFactory.getLog(GSet.class); + Logger LOG = LoggerFactory.getLogger(GSet.class); /** * @return The size of this set. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index 835206a89f..ac9776fcfa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -34,8 +34,6 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -45,6 +43,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenericOptionsParser is a utility to parse command line @@ -113,7 +113,8 @@ @InterfaceStability.Evolving public class GenericOptionsParser { - private static final Log LOG = LogFactory.getLog(GenericOptionsParser.class); + private static final Logger LOG = + LoggerFactory.getLogger(GenericOptionsParser.class); private Configuration conf; private CommandLine commandLine; private final boolean parseSuccessful; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java index 340f792f58..67b0247729 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java @@ -31,11 +31,11 @@ import javax.xml.parsers.DocumentBuilderFactory; 
import javax.xml.parsers.ParserConfigurationException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -47,7 +47,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Unstable public class HostsFileReader { - private static final Log LOG = LogFactory.getLog(HostsFileReader.class); + private static final Logger LOG = LoggerFactory.getLogger(HostsFileReader + .class); private final AtomicReference current; @@ -171,7 +172,7 @@ public static void readXmlFileToMapWithFileInputStream(String type, } } } catch (IOException|SAXException|ParserConfigurationException e) { - LOG.fatal("error parsing " + filename, e); + LOG.error("error parsing " + filename, e); throw new RuntimeException(e); } finally { fileInputStream.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java index 0512d4aa5d..1ffb7db3fe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java @@ -21,11 +21,11 @@ import java.util.Iterator; import java.util.NoSuchElementException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements an intrusive doubly-linked list. 
@@ -298,7 +298,8 @@ public boolean addFirst(Element elem) { return true; } - public static final Log LOG = LogFactory.getLog(IntrusiveCollection.class); + public static final Logger LOG = + LoggerFactory.getLogger(IntrusiveCollection.class); @Override public boolean remove(Object o) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java index 80d4468dc5..420ac8bc18 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java @@ -24,8 +24,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; @@ -35,6 +33,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class which sets up a simple thread which runs in a loop sleeping @@ -45,7 +45,7 @@ */ @InterfaceAudience.Private public class JvmPauseMonitor extends AbstractService { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JvmPauseMonitor.class); /** The target sleep time */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java index 2e6c079d0f..b01330f243 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java @@ -26,12 +26,12 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import com.google.common.annotations.VisibleForTesting; import com.google.common.net.InetAddresses; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Container class which holds a list of ip/host addresses and @@ -43,7 +43,7 @@ public class MachineList { - public static final Log LOG = LogFactory.getLog(MachineList.class); + public static final Logger LOG = LoggerFactory.getLogger(MachineList.class); public static final String WILDCARD_VALUE = "*"; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java index c38133611f..a8a380ed07 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java @@ -18,10 +18,10 @@ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A helper to load the native hadoop code i.e. libhadoop.so. 
@@ -33,8 +33,8 @@ @InterfaceStability.Unstable public final class NativeCodeLoader { - private static final Log LOG = - LogFactory.getLog(NativeCodeLoader.class); + private static final Logger LOG = + LoggerFactory.getLogger(NativeCodeLoader.class); private static boolean nativeCodeLoaded = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java index fc392c495b..cf1e46053a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java @@ -25,8 +25,6 @@ import java.util.Timer; import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.service.AbstractService; @@ -34,6 +32,8 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -43,7 +43,8 @@ */ public class NodeHealthScriptRunner extends AbstractService { - private static Log LOG = LogFactory.getLog(NodeHealthScriptRunner.class); + private static final Logger LOG = + LoggerFactory.getLogger(NodeHealthScriptRunner.class); /** Absolute path to the health script. 
*/ private String nodeHealthScript; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java index 3fbc9350f1..bd1c0f4a62 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java @@ -20,10 +20,10 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Utility to assist with generation of progress reports. Applications build * a hierarchy of {@link Progress} instances, each modelling a phase of @@ -33,7 +33,7 @@ @InterfaceAudience.LimitedPrivate({"MapReduce"}) @InterfaceStability.Unstable public class Progress { - private static final Log LOG = LogFactory.getLog(Progress.class); + private static final Logger LOG = LoggerFactory.getLogger(Progress.class); private String status = ""; private float progress; private int currentPhase; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java index 81983f0989..153f92b825 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java @@ -18,9 +18,9 @@ package org.apache.hadoop.util; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.concurrent.HadoopExecutors; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; @@ -47,7 +47,8 @@ public class ShutdownHookManager { private static final ShutdownHookManager MGR = new ShutdownHookManager(); - private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShutdownHookManager.class); private static final long TIMEOUT_DEFAULT = 10; private static final TimeUnit TIME_UNIT_DEFAULT = TimeUnit.SECONDS; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java index ffd88fb97a..5405d7756a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java @@ -19,8 +19,8 @@ package org.apache.hadoop.util; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -29,7 +29,8 @@ * Helper class to shutdown {@link Thread}s and {@link ExecutorService}s. 
*/ public class ShutdownThreadsHelper { - private static Log LOG = LogFactory.getLog(ShutdownThreadsHelper.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShutdownThreadsHelper.class); @VisibleForTesting static final int SHUTDOWN_WAIT_MS = 3000; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java index bba16316d6..7fd19907fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java @@ -32,11 +32,11 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.Shell.ShellCommandExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Plugin to calculate resource information on Linux systems. 
@@ -44,8 +44,8 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class SysInfoLinux extends SysInfo { - private static final Log LOG = - LogFactory.getLog(SysInfoLinux.class); + private static final Logger LOG = + LoggerFactory.getLogger(SysInfoLinux.class); /** * proc's meminfo virtual file has keys-values in the format diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java index bce2d6d3b8..e8940141b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java @@ -21,11 +21,11 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.Shell.ShellCommandExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Plugin to calculate resource information on Windows systems. 
@@ -34,7 +34,8 @@ @InterfaceStability.Evolving public class SysInfoWindows extends SysInfo { - private static final Log LOG = LogFactory.getLog(SysInfoWindows.class); + private static final Logger LOG = + LoggerFactory.getLogger(SysInfoWindows.class); private long vmemSize; private long memSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java index ab7b5fdedd..6444428a27 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java @@ -17,10 +17,9 @@ */ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InputStream; @@ -28,7 +27,7 @@ @InterfaceStability.Evolving public class ThreadUtil { - private static final Log LOG = LogFactory.getLog(ThreadUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(ThreadUtil.class); /** * Cause the current thread to sleep as close as possible to the provided diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java index 10e2590ba8..ca09050088 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java @@ -22,11 +22,11 @@ import java.io.InputStream; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class returns build information about Hadoop components. @@ -34,7 +34,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class VersionInfo { - private static final Log LOG = LogFactory.getLog(VersionInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class); private Properties info; @@ -46,7 +46,7 @@ protected VersionInfo(String component) { is = ThreadUtil.getResourceAsStream(versionInfoFile); info.load(is); } catch (IOException ex) { - LogFactory.getLog(getClass()).warn("Could not read '" + + LoggerFactory.getLogger(getClass()).warn("Could not read '" + versionInfoFile + "', " + ex.toString(), ex); } finally { IOUtils.closeStream(is); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java index d6878670ff..61eb777d48 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java @@ -18,8 +18,8 @@ package org.apache.hadoop.util.concurrent; import com.google.common.util.concurrent.AbstractFuture; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -29,7 +29,8 @@ /** A {@link Future} implemented using an {@link AsyncGet} object. 
*/ public class AsyncGetFuture extends AbstractFuture { - public static final Log LOG = LogFactory.getLog(AsyncGetFuture.class); + public static final Logger LOG = + LoggerFactory.getLogger(AsyncGetFuture.class); private final AtomicBoolean called = new AtomicBoolean(false); private final AsyncGet asyncGet; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java index 3bc9ed9ea9..02452a3ccd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -29,8 +29,8 @@ /** Helper functions for Executors. 
*/ public final class ExecutorHelper { - private static final Log LOG = LogFactory - .getLog(ExecutorHelper.class); + private static final Logger LOG = LoggerFactory + .getLogger(ExecutorHelper.class); static void logThrowableFromAfterExecute(Runnable r, Throwable t) { if (LOG.isDebugEnabled()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java index 8d910b6cca..78e729b81d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -32,8 +32,8 @@ public class HadoopScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { - private static final Log LOG = LogFactory - .getLog(HadoopScheduledThreadPoolExecutor.class); + private static final Logger LOG = LoggerFactory + .getLogger(HadoopScheduledThreadPoolExecutor.class); public HadoopScheduledThreadPoolExecutor(int corePoolSize) { super(corePoolSize); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java index bcf26cb17c..fa845b75e3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.BlockingQueue; import java.util.concurrent.RejectedExecutionHandler; @@ -34,8 +34,8 @@ * */ public final class HadoopThreadPoolExecutor extends ThreadPoolExecutor { - private static final Log LOG = LogFactory - .getLog(HadoopThreadPoolExecutor.class); + private static final Logger LOG = LoggerFactory + .getLogger(HadoopThreadPoolExecutor.class); public HadoopThreadPoolExecutor(int corePoolSize, int maximumPoolSize, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java index f9c8c165ed..9183524a1a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java @@ -26,8 +26,6 @@ import java.util.EnumSet; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.ByteBufferReadable; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSExceptionMessages; @@ -43,9 +41,11 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class CryptoStreamsTestBase { - protected static final Log LOG = LogFactory.getLog( + protected static final Logger LOG = LoggerFactory.getLogger( CryptoStreamsTestBase.class); protected static CryptoCodec codec; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java index 52e547ba40..eca23a7f2a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java @@ -31,8 +31,6 @@ import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -46,9 +44,12 @@ import org.junit.Test; import com.google.common.primitives.Longs; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestCryptoCodec { - private static final Log LOG= LogFactory.getLog(TestCryptoCodec.class); + private static final Logger LOG= LoggerFactory.getLogger(TestCryptoCodec + .class); private static byte[] key = new byte[16]; private static byte[] iv = new byte[16]; private static final int bufferSize = 4096; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java index 2e208d2176..73fd2802ab 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java @@ -32,8 +32,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem.Statistics; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; @@ -41,6 +39,8 @@ import 
com.google.common.base.Supplier; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -48,7 +48,8 @@ *

*/ public abstract class FCStatisticsBaseTest { - private static final Log LOG = LogFactory.getLog(FCStatisticsBaseTest.class); + private static final Logger LOG = LoggerFactory.getLogger(FCStatisticsBaseTest + .class); static protected int blockSize = 512; static protected int numBlocks = 1; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java index 584ca40a3a..f5fb06f74a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java @@ -17,15 +17,16 @@ */ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.fail; public class TestFileContext { - private static final Log LOG = LogFactory.getLog(TestFileContext.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFileContext + .class); @Test public void testDefaultURIWithoutScheme() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index d29b1a4071..1962f49cce 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -33,16 +33,16 @@ import java.util.List; import org.junit.Test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFileStatus { - private static final Log LOG = - LogFactory.getLog(TestFileStatus.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFileStatus.class); /** Values for creating {@link FileStatus} in some tests */ static final int LENGTH = 1; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java index 3c733d219b..0ad03fcbac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java @@ -47,8 +47,6 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; @@ -59,9 +57,11 @@ import org.junit.Before; import org.junit.Ignore; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFileUtil { - private static final Log LOG = LogFactory.getLog(TestFileUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFileUtil.class); private static final File TEST_DIR = GenericTestUtils.getTestDir("fu"); private static final String FILE = "x"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java index 1db72d2019..f9b2420067 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java @@ -31,8 +31,6 @@ import java.io.IOException; import 
java.io.PrintStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; @@ -40,9 +38,11 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFsShellCopy { - static final Log LOG = LogFactory.getLog(TestFsShellCopy.class); + static final Logger LOG = LoggerFactory.getLogger(TestFsShellCopy.class); static Configuration conf; static FsShell shell; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 38ad65b044..77b2f445a4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -33,8 +33,6 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.shell.CommandFactory; import org.apache.hadoop.fs.shell.FsCommand; @@ -46,14 +44,16 @@ import org.apache.hadoop.util.Shell; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This test validates that chmod, chown, chgrp returning correct exit codes * */ public class TestFsShellReturnCode { - private static final Log LOG = LogFactory - .getLog("org.apache.hadoop.fs.TestFsShellReturnCode"); + private static final Logger LOG = LoggerFactory + .getLogger("org.apache.hadoop.fs.TestFsShellReturnCode"); private static final Configuration conf = new Configuration(); private static FileSystem fileSys; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java index 89c886ef62..5fe4e39ade 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java @@ -21,17 +21,17 @@ import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertThat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFsShellTouch { - static final Log LOG = LogFactory.getLog(TestFsShellTouch.class); + static final Logger LOG = LoggerFactory.getLogger(TestFsShellTouch.class); static FsShell shell; static LocalFileSystem lfs; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java index bacdbb73e4..a1aa4de2c0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java @@ -18,8 +18,6 @@ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; @@ -30,6 +28,8 @@ import org.apache.hadoop.util.Progressable; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.reflect.Method; @@ -48,7 +48,8 @@ @SuppressWarnings("deprecation") public class TestHarFileSystem { - public static final Log LOG = LogFactory.getLog(TestHarFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestHarFileSystem.class); /** * FileSystem methods that must not be overwritten by diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java index e7766f3d92..3d202dfd2c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs.contract; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URI; @@ -39,8 +39,8 @@ */ public abstract class AbstractBondedFSContract extends AbstractFSContract { - private static final Log LOG = - LogFactory.getLog(AbstractBondedFSContract.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractBondedFSContract.class); /** * Pattern for the option for test filesystems from schema diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index ca01702217..6da51827c8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -32,8 +32,6 @@ import java.util.EnumSet; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CreateFlag; @@ -49,6 +47,8 @@ import org.apache.hadoop.util.ToolRunner; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** The load generator is a tool for testing NameNode behavior under * different client loads. Note there is a subclass of this clas that lets @@ -129,7 +129,7 @@ * -scriptFile : text file to parse for scripted operation */ public class LoadGenerator extends Configured implements Tool { - public static final Log LOG = LogFactory.getLog(LoadGenerator.class); + public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class); private volatile static boolean shouldRun = true; protected static Path root = DataGenerator.DEFAULT_ROOT; @@ -341,7 +341,7 @@ private void genFile(Path file, long fileSize) throws IOException { executionTime[WRITE_CLOSE] += (Time.now() - startTime); numOfOps[WRITE_CLOSE]++; } finally { - IOUtils.cleanup(LOG, out); + IOUtils.cleanupWithLogger(LOG, out); } } } @@ -651,7 +651,7 @@ protected static int loadScriptFile(String filename, boolean readLocally) throws System.err.println("Line: " + lineNum + ", " + e.getMessage()); return -1; } finally { - IOUtils.cleanup(LOG, br); + IOUtils.cleanupWithLogger(LOG, br); } // Copy vectors to arrays of values, to avoid autoboxing overhead later diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java index 23750816bd..764ad2e1e3 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java @@ -19,18 +19,18 @@ import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.test.MultithreadedTestUtil.TestContext; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.server.ZooKeeperServer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class ActiveStandbyElectorTestUtil { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( ActiveStandbyElectorTestUtil.class); private static final long LOG_INTERVAL_MS = 500; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index 551da56007..6f01be89cd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import com.google.protobuf.BlockingService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB; @@ -38,6 +36,8 @@ import org.mockito.Mockito; import com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeys.HA_HM_RPC_TIMEOUT_DEFAULT; @@ -46,7 +46,8 @@ * a mock 
implementation. */ class DummyHAService extends HAServiceTarget { - public static final Log LOG = LogFactory.getLog(DummyHAService.class); + public static final Logger LOG = LoggerFactory.getLogger(DummyHAService + .class); private static final String DUMMY_FENCE_KEY = "dummy.fence.key"; volatile HAServiceState state; HAServiceProtocol proxy, healthMonitorProxy; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java index a5fbe8f03a..9146e01e6e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java @@ -25,8 +25,6 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -41,6 +39,8 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Ints; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Harness for starting two dummy ZK FailoverControllers, associated with @@ -57,7 +57,8 @@ public class MiniZKFCCluster { private DummySharedResource sharedResource = new DummySharedResource(); - private static final Log LOG = LogFactory.getLog(MiniZKFCCluster.class); + private static final Logger LOG = LoggerFactory.getLogger(MiniZKFCCluster + .class); public MiniZKFCCluster(Configuration conf, ZooKeeperServer zks) { this.conf = conf; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java index 1d8f48e2d0..0e59aa1004 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java @@ -24,8 +24,6 @@ import java.io.PrintStream; import java.net.InetSocketAddress; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -34,9 +32,11 @@ import com.google.common.base.Charsets; import com.google.common.base.Joiner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestHAAdmin { - private static final Log LOG = LogFactory.getLog(TestHAAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHAAdmin.class); private HAAdmin tool; private ByteArrayOutputStream errOutBytes = new ByteArrayOutputStream(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java index 6c46543779..8738372fc4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java @@ -23,8 +23,6 @@ import java.net.InetSocketAddress; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -34,9 +32,11 @@ import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestHealthMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestHealthMonitor.class); /** How many times has 
createProxy been called */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index 0e4a1caeef..7036175263 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -33,14 +33,14 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGlobalFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java index baa6f91945..6ec6e0f965 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.http; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -40,6 +38,8 @@ import org.junit.rules.ExpectedException; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.Filter; import javax.servlet.FilterChain; @@ -73,7 +73,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; public class TestHttpServer extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(TestHttpServer.class); + static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class); private static HttpServer2 server; private static final int MAX_THREADS = 10; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java index d72a958a0b..afd06acfca 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java @@ -18,8 +18,6 @@ package org.apache.hadoop.http; import org.apache.http.HttpStatus; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.http.resource.JerseyResource; @@ -27,12 +25,14 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.net.HttpURLConnection; import java.net.URL; public class TestHttpServerLogs extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(TestHttpServerLogs.class); + static final Logger LOG = LoggerFactory.getLogger(TestHttpServerLogs.class); private static HttpServer2 server; @BeforeClass diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java index 
c92944e57f..07dbc2a7c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java @@ -19,8 +19,8 @@ import org.junit.Test; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.FileNotFoundException; @@ -28,7 +28,8 @@ * Test webapp loading */ public class TestHttpServerWebapps extends HttpServerFunctionalTest { - private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestHttpServerWebapps.class); /** * Test that the test server is loadable on the classpath @@ -58,7 +59,7 @@ public void testMissingServerResource() throws Throwable { stop(server); fail("Expected an exception, got " + serverDescription); } catch (FileNotFoundException expected) { - log.debug("Expected exception " + expected, expected); + LOG.debug("Expected exception " + expected, expected); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java index 3d3e020f41..5239ed6215 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.http; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.minikdc.MiniKdc; @@ -38,6 +36,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.Assert; +import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileWriter; @@ -53,7 +53,8 @@ */ public class TestHttpServerWithSpengo { - static final Log LOG = LogFactory.getLog(TestHttpServerWithSpengo.class); + static final Logger LOG = + LoggerFactory.getLogger(TestHttpServerWithSpengo.class); private static final String SECRET_STR = "secret"; private static final String HTTP_USER = "HTTP"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index 09f31dff7b..4c35b391c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -33,14 +33,14 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestPathFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java index 30aca57eef..5af6d6fb56 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java @@ -32,8 +32,6 @@ import javax.net.ssl.SSLSocket; import 
javax.net.ssl.SSLSocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.io.IOUtils; @@ -45,6 +43,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This testcase issues SSL certificates configures the HttpServer to serve @@ -56,7 +56,8 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { private static final String BASEDIR = GenericTestUtils.getTempPath(TestSSLHttpServer.class.getSimpleName()); - private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSSLHttpServer.class); private static Configuration conf; private static HttpServer2 server; private static String keystoresDir; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index f58c230021..eafd0ae9cc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -32,15 +32,15 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestServletFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); 
static volatile String uri = null; /** A very simple filter which record the uri filtered. */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java index 607d17f57b..68a10a15d2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java @@ -30,10 +30,10 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.http.JettyUtils; import org.eclipse.jetty.util.ajax.JSON; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple Jersey resource class TestHttpServer. @@ -42,7 +42,7 @@ */ @Path("") public class JerseyResource { - static final Log LOG = LogFactory.getLog(JerseyResource.class); + static final Logger LOG = LoggerFactory.getLogger(JerseyResource.class); public static final String PATH = "path"; public static final String OP = "op"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java index 505aca736c..722e9de595 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java @@ -21,13 +21,15 @@ import java.io.*; -import org.apache.commons.logging.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.conf.*; import org.junit.Test; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertEquals; @@ -37,7 +39,8 @@ /** Support for flat files of binary key/value pairs. */ public class TestArrayFile { - private static final Log LOG = LogFactory.getLog(TestArrayFile.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestArrayFile.class); private static final Path TEST_DIR = new Path(GenericTestUtils.getTempPath( TestMapFile.class.getSimpleName())); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java index bd8f2ef537..b70e011f6a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java @@ -21,16 +21,18 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertEquals; public class TestDefaultStringifier { private static Configuration conf = new Configuration(); - private static final Log LOG = LogFactory.getLog(TestDefaultStringifier.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestDefaultStringifier.class); private char[] alphabet = "abcdefghijklmnopqrstuvwxyz".toCharArray(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java index e97ab6a559..044824356e 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java @@ -21,8 +21,6 @@ import java.io.*; import java.util.*; -import org.apache.commons.logging.*; - import org.apache.hadoop.fs.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile.Metadata; @@ -40,11 +38,14 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Support for flat files of binary key/value pairs. */ public class TestSequenceFile { - private static final Log LOG = LogFactory.getLog(TestSequenceFile.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSequenceFile.class); private Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java index 1fcfab673c..b6ec487458 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java @@ -21,14 +21,13 @@ import java.io.*; import java.util.*; - -import org.apache.commons.logging.*; - import org.apache.hadoop.fs.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; @@ -37,7 +36,7 @@ /** Support for flat files of binary key/value pairs. 
*/ public class TestSetFile { - private static final Log LOG = LogFactory.getLog(TestSetFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSetFile.class); private static String FILE = GenericTestUtils.getTempPath("test.set"); private static Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java index 92fb4ec94b..57359a0b86 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java @@ -20,15 +20,16 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class TestWritableUtils { - private static final Log LOG = LogFactory.getLog(TestWritableUtils.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestWritableUtils.class); private void testValue(int val, int vintlen) throws IOException { DataOutputBuffer buf = new DataOutputBuffer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java index 1ea9dc8ada..133ff9a7ff 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java @@ -49,8 +49,6 @@ import java.util.zip.GZIPOutputStream; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FileStatus; @@ -81,10 +79,12 @@ import org.junit.Assert; import org.junit.Assume; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestCodec { - private static final Log LOG= LogFactory.getLog(TestCodec.class); + private static final Logger LOG= LoggerFactory.getLogger(TestCodec.class); private Configuration conf = new Configuration(); private int count = 10000; @@ -382,7 +382,7 @@ private static Path writeSplitTestFile(FileSystem fs, Random rand, } LOG.info("Wrote " + seq + " records to " + file); } finally { - IOUtils.cleanup(LOG, fout); + IOUtils.cleanupWithLogger(LOG, fout); CodecPool.returnCompressor(cmp); } return file; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java index dd7bdd21f5..d56b4e1e6e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -36,12 +34,15 @@ import org.apache.hadoop.util.ReflectionUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeTrue; public class TestCompressionStreamReuse { - private static final Log LOG = LogFactory - .getLog(TestCompressionStreamReuse.class); + private static final 
Logger LOG = LoggerFactory + .getLogger(TestCompressionStreamReuse.class); private Configuration conf = new Configuration(); private int count = 10000; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index 436f10a3d1..6989905d75 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -46,8 +46,6 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileSystem; @@ -56,12 +54,14 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.*; import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat.*; public class TestNativeIO { - static final Log LOG = LogFactory.getLog(TestNativeIO.class); + static final Logger LOG = LoggerFactory.getLogger(TestNativeIO.class); static final File TEST_DIR = GenericTestUtils.getTestDir("testnativeio"); @@ -619,8 +619,8 @@ public void testCopyFileUnbuffered() throws Exception { NativeIO.copyFileUnbuffered(srcFile, dstFile); Assert.assertEquals(srcFile.length(), dstFile.length()); } finally { - IOUtils.cleanup(LOG, channel); - IOUtils.cleanup(LOG, raSrcFile); + IOUtils.cleanupWithLogger(LOG, channel); + IOUtils.cleanupWithLogger(LOG, raSrcFile); FileUtils.deleteQuietly(TEST_DIR); } } diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java index 64abecdbf3..fbe3fb8118 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java @@ -27,14 +27,15 @@ import org.junit.Before; import org.junit.Test; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestSharedFileDescriptorFactory { - static final Log LOG = LogFactory.getLog(TestSharedFileDescriptorFactory.class); + static final Logger LOG = + LoggerFactory.getLogger(TestSharedFileDescriptorFactory.class); private static final File TEST_BASE = GenericTestUtils.getTestDir(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java index 3f2802f330..64c486c4b1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java @@ -18,8 +18,6 @@ package org.apache.hadoop.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.LongWritable; @@ -34,6 +32,8 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.InetSocketAddress; @@ -49,7 +49,7 @@ public class TestAsyncIPC { private static Configuration conf; - private static final Log LOG = LogFactory.getLog(TestAsyncIPC.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAsyncIPC.class); static AsyncGetFuture getAsyncRpcResponseFuture() { @@ -183,7 +183,7 @@ public void run() { final long param = TestIPC.RANDOM.nextLong(); runCall(i, param); } catch (Exception e) { - LOG.fatal(String.format("Caller-%d Call-%d caught: %s", callerId, i, + LOG.error(String.format("Caller-%d Call-%d caught: %s", callerId, i, StringUtils.stringifyException(e))); failed = true; } @@ -219,7 +219,7 @@ private void waitForReturnValues(final int start, final int end) for (int i = start; i < end; i++) { LongWritable value = returnFutures.get(i).get(); if (expectedValues.get(i) != value.get()) { - LOG.fatal(String.format("Caller-%d Call-%d failed!", callerId, i)); + LOG.error(String.format("Caller-%d Call-%d failed!", callerId, i)); failed = true; break; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index 4bda6377c5..3416746ab3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -58,8 +58,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -96,12 +94,13 @@ import com.google.common.base.Supplier; import com.google.common.primitives.Bytes; import com.google.common.primitives.Ints; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.slf4j.event.Level; /** Unit tests for IPC. */ public class TestIPC { - public static final Log LOG = - LogFactory.getLog(TestIPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestIPC.class); private static Configuration conf; final static int PING_INTERVAL = 1000; @@ -230,12 +229,12 @@ public void run() { final long param = RANDOM.nextLong(); LongWritable value = call(client, param, server, conf); if (value.get() != param) { - LOG.fatal("Call failed!"); + LOG.error("Call failed!"); failed = true; break; } } catch (Exception e) { - LOG.fatal("Caught: " + StringUtils.stringifyException(e)); + LOG.error("Caught: " + StringUtils.stringifyException(e)); failed = true; } } @@ -784,7 +783,7 @@ public void run() { call(client, new LongWritable(Thread.currentThread().getId()), addr, 60000, conf); } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); failures.incrementAndGet(); return; } finally { @@ -895,7 +894,7 @@ public void run() { callBarrier.await(); } } catch (Throwable t) { - LOG.error(t); + LOG.error(t.toString()); error.set(true); } } @@ -917,7 +916,7 @@ public void run() { callReturned.countDown(); Thread.sleep(10000); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString()); } catch (InterruptedException e) { } finally { client.stop(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java index 546cb8f472..7d7905e6b4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java @@ -32,8 +32,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.BytesWritable; @@ -45,6 +43,8 @@ import org.apache.hadoop.net.NetUtils; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This test provokes partial writes in the server, which is @@ -52,8 +52,8 @@ */ public class TestIPCServerResponder { - public static final Log LOG = - LogFactory.getLog(TestIPCServerResponder.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestIPCServerResponder.class); private static Configuration conf = new Configuration(); @@ -126,7 +126,7 @@ public void run() { call(client, param, address); Thread.sleep(RANDOM.nextInt(20)); } catch (Exception e) { - LOG.fatal("Caught Exception", e); + LOG.error("Caught Exception", e); failed = true; } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java index f5fefe48d0..476b1979b2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java @@ -29,18 +29,18 @@ import com.google.protobuf.BlockingService; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.protobuf.TestProtos; import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcHandoffProto; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestProtoBufRpcServerHandoff { - public static final 
Log LOG = - LogFactory.getLog(TestProtoBufRpcServerHandoff.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestProtoBufRpcServerHandoff.class); @Test(timeout = 20000) public void test() throws Exception { @@ -83,8 +83,8 @@ public void test() throws Exception { ClientInvocationCallable callable1 = future1.get(); ClientInvocationCallable callable2 = future2.get(); - LOG.info(callable1); - LOG.info(callable2); + LOG.info(callable1.toString()); + LOG.info(callable2.toString()); // Ensure the 5 second sleep responses are within a reasonable time of each // other. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java index 8725cf426a..c6209d2483 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java @@ -20,8 +20,6 @@ import com.google.common.base.Supplier; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -54,6 +52,8 @@ import org.junit.Test; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import javax.net.SocketFactory; @@ -104,7 +104,7 @@ @SuppressWarnings("deprecation") public class TestRPC extends TestRpcBase { - public static final Log LOG = LogFactory.getLog(TestRPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestRPC.class); @Before public void setup() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index a06d9fdc01..ffee086fa9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -18,12 +18,12 @@ package org.apache.hadoop.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.reflect.Method; @@ -39,8 +39,8 @@ public class TestRPCCompatibility { private static RPC.Server server; private ProtocolProxy proxy; - public static final Log LOG = - LogFactory.getLog(TestRPCCompatibility.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRPCCompatibility.class); private static Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java index 93af7d4aad..aee8893538 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java @@ -19,11 +19,11 @@ package org.apache.hadoop.ipc; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; @@ -42,7 +42,8 @@ @SuppressWarnings("deprecation") public class 
TestRPCServerShutdown extends TestRpcBase { - public static final Log LOG = LogFactory.getLog(TestRPCServerShutdown.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRPCServerShutdown.class); @Before public void setup() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java index 3716bc3667..2e0b3daa22 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java @@ -30,19 +30,19 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.net.NetUtils; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestRpcServerHandoff { - public static final Log LOG = - LogFactory.getLog(TestRpcServerHandoff.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRpcServerHandoff.class); private static final String BIND_ADDRESS = "0.0.0.0"; private static final Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 7608cb439a..0b463a5130 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -20,8 +20,6 @@ import com.google.protobuf.ServiceException; import 
org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -43,6 +41,8 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import javax.security.auth.callback.Callback; @@ -120,8 +120,7 @@ public TestSaslRPC(QualityOfProtection[] qop, this.saslPropertiesResolver = saslPropertiesResolver; } - public static final Log LOG = - LogFactory.getLog(TestSaslRPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestSaslRPC.class); static final String ERROR_MESSAGE = "Token is invalid"; static final String SERVER_KEYTAB_KEY = "test.ipc.server.keytab"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java index afda5355da..420d6b9408 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java @@ -27,12 +27,12 @@ import java.net.InetSocketAddress; import java.net.ServerSocket; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.Server.Call; import org.junit.Test; +import org.slf4j.Logger; /** * This is intended to be a set of unit tests for the @@ -136,7 +136,7 @@ static class TestException3 extends Exception { public void testLogExceptions() throws Exception { final Configuration conf = new Configuration(); final Call dummyCall = new Call(0, 0, null, null); - Log logger = 
mock(Log.class); + Logger logger = mock(Logger.class); Server server = new Server("0.0.0.0", 0, LongWritable.class, 1, conf) { @Override public Writable call( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java index 642817617e..d4bc06ad3c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java @@ -21,14 +21,15 @@ import static org.junit.Assert.assertEquals; import org.junit.Test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.ipc.WeightedRoundRobinMultiplexer.IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY; public class TestWeightedRoundRobinMultiplexer { - public static final Log LOG = LogFactory.getLog(TestWeightedRoundRobinMultiplexer.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestWeightedRoundRobinMultiplexer.class); private WeightedRoundRobinMultiplexer mux; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java index 7e094edc09..7bc772f062 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java @@ -31,8 +31,6 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsTag; @@ -47,9 +45,12 @@ import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31; import org.apache.hadoop.metrics2.sink.ganglia.GangliaMetricsTestHelper; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGangliaMetrics { - public static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestMetricsSystemImpl.class); // This is the prefix to locate the config file for this particular test // This is to avoid using the same config file with other test cases, // which can cause race conditions. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java index 2a62accc2b..b53be4d735 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java @@ -24,15 +24,16 @@ import static org.junit.Assert.*; import org.apache.commons.configuration2.Configuration; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.impl.ConfigUtil.*; /** * Test metrics configuration */ public class TestMetricsConfig { - static final Log LOG = LogFactory.getLog(TestMetricsConfig.class); + static final Logger LOG = LoggerFactory.getLogger(TestMetricsConfig.class); /** * Common use cases diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java index cfea104d4d..abd1b132b0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java @@ -42,8 +42,6 @@ import com.google.common.collect.Iterables; import org.apache.commons.configuration2.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsException; import static org.apache.hadoop.test.MoreAsserts.*; @@ -61,14 +59,17 @@ import org.apache.hadoop.metrics2.lib.MutableGaugeLong; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the MetricsSystemImpl class */ @RunWith(MockitoJUnitRunner.class) public class TestMetricsSystemImpl { - private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class); - + private static final Logger LOG = + LoggerFactory.getLogger(TestMetricsSystemImpl.class); + static { DefaultMetricsSystem.setMiniClusterMode(true); } @Captor private ArgumentCaptor r1; @@ -78,7 +79,7 @@ public class TestMetricsSystemImpl { public static class TestSink implements MetricsSink { @Override public void putMetrics(MetricsRecord record) { - LOG.debug(record); + LOG.debug(record.toString()); } @Override public void flush() {} @@ -420,7 +421,7 @@ public void flush() { } private void checkMetricsRecords(List recs) { - LOG.debug(recs); + LOG.debug(recs.toString()); MetricsRecord r = recs.get(0); assertEquals("name", "s1rec", r.name()); assertEquals("tags", new MetricsTag[] { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java index 7da8d1b8c6..719130f5ba 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java @@ -22,18 +22,20 @@ import java.util.concurrent.CountDownLatch; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import static org.apache.hadoop.metrics2.impl.SinkQueue.*; /** * Test the half-blocking metrics sink queue */ public class TestSinkQueue { - private static final Log LOG = LogFactory.getLog(TestSinkQueue.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSinkQueue.class); /** * Test common use case @@ -234,7 +236,7 @@ private void shouldThrowCME(Fun callback) throws Exception { callback.run(); } catch (ConcurrentModificationException e) { - LOG.info(e); + LOG.info(e.toString()); return; } LOG.error("should've thrown CME"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java index fd716ae711..b0d7debe44 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java @@ -34,18 +34,19 @@ import java.util.Random; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.util.Quantile; import org.junit.Test; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test metrics record builder interface and mutable metrics */ public class TestMutableMetrics { - private static final Log LOG = LogFactory.getLog(TestMutableMetrics.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMutableMetrics.class); private final double EPSILON = 1e-42; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java index 7bee3a2008..e69947ecdc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java @@ -25,15 +25,17 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsTag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.lib.Interns.*; public class TestMetricsCache { - private static final Log LOG = LogFactory.getLog(TestMetricsCache.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMetricsCache.class); @SuppressWarnings("deprecation") @Test public void testUpdate() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java index a294e745ee..80f2ebc98c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java @@ -18,16 
+18,17 @@ package org.apache.hadoop.net; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.net.ServerSocket; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - public class ServerSocketUtil { - private static final Log LOG = LogFactory.getLog(ServerSocketUtil.class); + private static final Logger LOG = + LoggerFactory.getLogger(ServerSocketUtil.class); private static Random rand = new Random(); /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java index 863d380ac3..3aa0acdcf4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java @@ -28,11 +28,11 @@ import javax.naming.CommunicationException; import javax.naming.NameNotFoundException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Time; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.CoreMatchers.not; @@ -44,7 +44,7 @@ */ public class TestDNS { - private static final Log LOG = LogFactory.getLog(TestDNS.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDNS.class); private static final String DEFAULT = "default"; // This is not a legal hostname (starts with a hyphen). 
It will never diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java index 1375d9b0b9..fc1c10254f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java @@ -38,8 +38,6 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.NetUtilsTestResolver; @@ -47,10 +45,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestNetUtils { - private static final Log LOG = LogFactory.getLog(TestNetUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNetUtils.class); private static final int DEST_PORT = 4040; private static final String DEST_PORT_NAME = Integer.toString(DEST_PORT); private static final int LOCAL_PORT = 8080; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java index 649ba1264b..f1c03cf5df 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java @@ -25,8 +25,6 @@ import java.nio.channels.Pipe; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.MultithreadedTestUtil; import 
org.apache.hadoop.test.MultithreadedTestUtil.TestContext; @@ -36,6 +34,9 @@ import org.apache.hadoop.io.nativeio.NativeIO; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.*; /** @@ -47,7 +48,8 @@ */ public class TestSocketIOWithTimeout { - static Log LOG = LogFactory.getLog(TestSocketIOWithTimeout.class); + static final Logger LOG = + LoggerFactory.getLogger(TestSocketIOWithTimeout.class); private static int TIMEOUT = 1*1000; private static String TEST_STRING = "1234567890"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java index f3c0a5cb25..a906c4aa85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java @@ -18,12 +18,12 @@ package org.apache.hadoop.net; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; @@ -34,7 +34,8 @@ * Because the map is actually static, this map needs to be reset for every test */ public class TestStaticMapping extends Assert { - private static final Log LOG = LogFactory.getLog(TestStaticMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestStaticMapping.class); /** * Reset the map then create a new instance of the {@link StaticMapping} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java index 
8a5a0a4225..28b3cbe3fa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java @@ -203,7 +203,7 @@ public Void call() { } catch (IOException e) { throw new RuntimeException("unexpected IOException", e); } finally { - IOUtils.cleanup(DomainSocket.LOG, serverConn); + IOUtils.cleanupWithLogger(DomainSocket.LOG, serverConn); } return null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java index 4cc86a7a42..aa522f2660 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java @@ -26,17 +26,18 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Test; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestDomainSocketWatcher { - static final Log LOG = LogFactory.getLog(TestDomainSocketWatcher.class); + static final Logger LOG = + LoggerFactory.getLogger(TestDomainSocketWatcher.class); private Throwable trappedException = null; @@ -141,7 +142,7 @@ public boolean handle(DomainSocket sock) { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -169,7 +170,7 @@ public void run() { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -212,7 +213,7 @@ public 
boolean handle(DomainSocket sock) { TimeUnit.MILLISECONDS.sleep(1); } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -241,7 +242,7 @@ public void run() { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java index 79f56e065a..0a2d42c273 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java @@ -24,16 +24,17 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestCompositeGroupMapping { - public static final Log LOG = LogFactory.getLog(TestCompositeGroupMapping.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestCompositeGroupMapping.class); private static Configuration conf = new Configuration(); private static class TestUser { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index c4dbcac4c2..3293903e64 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -18,8 +18,6 @@ 
package org.apache.hadoop.security; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.io.Text; @@ -34,6 +32,8 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.InetAddress; @@ -58,8 +58,8 @@ public class TestDoAsEffectiveUser extends TestRpcBase { private static final Configuration masterConf = new Configuration(); - public static final Log LOG = LogFactory - .getLog(TestDoAsEffectiveUser.class); + public static final Logger LOG = LoggerFactory + .getLogger(TestDoAsEffectiveUser.class); static { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java index 85f17b1639..3ef3698495 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java @@ -21,16 +21,17 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; public class TestGroupFallback { - public static final Log LOG = LogFactory.getLog(TestGroupFallback.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestGroupFallback.class); @Test public void testGroupShell() throws Exception { diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java index 930c45e020..46e36b3172 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java @@ -40,17 +40,17 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGroupsCaching { - public static final Log TESTLOG = LogFactory.getLog(TestGroupsCaching.class); + public static final Logger TESTLOG = + LoggerFactory.getLogger(TestGroupsCaching.class); private static String[] myGroups = {"grp1", "grp2"}; private Configuration conf; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java index 6d9ea0853f..d3c9538641 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java @@ -20,8 +20,6 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import 
org.apache.hadoop.test.GenericTestUtils; @@ -30,6 +28,8 @@ import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; import static org.mockito.Mockito.doNothing; @@ -38,8 +38,8 @@ import static org.mockito.Mockito.when; public class TestShellBasedUnixGroupsMapping { - private static final Log TESTLOG = - LogFactory.getLog(TestShellBasedUnixGroupsMapping.class); + private static final Logger TESTLOG = + LoggerFactory.getLogger(TestShellBasedUnixGroupsMapping.class); private final GenericTestUtils.LogCapturer shellMappingLog = GenericTestUtils.LogCapturer.captureLogs( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java index 354dade6c2..6fa59922b2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -39,13 +37,16 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TestCredentialProviderFactory { - public static final Log LOG = LogFactory.getLog(TestCredentialProviderFactory.class); + public 
static final Logger LOG = + LoggerFactory.getLogger(TestCredentialProviderFactory.class); @Rule public final TestName test = new TestName(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java index ddf74d1b1e..0868381027 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java @@ -26,8 +26,6 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; @@ -45,8 +45,8 @@ @InterfaceStability.Evolving public class TestAccessControlList { - private static final Log LOG = - LogFactory.getLog(TestAccessControlList.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestAccessControlList.class); /** * Test the netgroups (groups in ACL rules that start with @) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java index 577f11b929..9061fe752c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java @@ -25,8 +25,6 @@ import java.util.Arrays; import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.Groups; @@ -34,11 +32,13 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.StringUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestProxyUsers { - private static final Log LOG = - LogFactory.getLog(TestProxyUsers.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestProxyUsers.class); private static final String REAL_USER_NAME = "proxier"; private static final String PROXY_USER_NAME = "proxied_user"; private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java index b41ff15251..ad12f0baba 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java @@ -32,8 +32,6 @@ import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Text; @@ -49,11 +47,14 @@ import org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.Time; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static 
org.junit.Assert.*; public class TestDelegationToken { - private static final Log LOG = LogFactory.getLog(TestDelegationToken.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestDelegationToken.class); private static final Text KIND = new Text("MY KIND"); public static class TestDelegationTokenIdentifier diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java index 6189c0ed19..ad3dfcf0c5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java @@ -18,12 +18,12 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.Service.STATE; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -36,7 +36,8 @@ public class TestCompositeService { private static final int FAILED_SERVICE_SEQ_NUMBER = 2; - private static final Log LOG = LogFactory.getLog(TestCompositeService.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCompositeService.class); /** * flag to state policy of CompositeService, and hence diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java index cf9ca32524..f72e130d75 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java @@ -19,8 +19,6 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.service.LoggingStateChangeListener; @@ -28,9 +26,12 @@ import org.apache.hadoop.service.ServiceStateChangeListener; import org.apache.hadoop.service.ServiceStateException; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestServiceLifecycle extends ServiceAssert { - private static Log LOG = LogFactory.getLog(TestServiceLifecycle.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestServiceLifecycle.class); /** * Walk the {@link BreakableService} through it's lifecycle, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java index b2f6054fe8..caa65c5da0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java @@ -32,8 +32,6 @@ import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatcher; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsSource; @@ -42,6 +40,8 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.util.Quantile; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.metrics2.lib.Interns.*; @@ -50,7 +50,7 @@ */ public class 
MetricsAsserts { - final static Log LOG = LogFactory.getLog(MetricsAsserts.class); + final static Logger LOG = LoggerFactory.getLogger(MetricsAsserts.class); private static final double EPSILON = 0.00001; public static MetricsSystem mockMetricsSystem() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java index b51329f72d..217c2f84eb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java @@ -20,9 +20,9 @@ import java.util.HashSet; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A utility to easily test threaded/synchronized code. @@ -60,8 +60,8 @@ */ public abstract class MultithreadedTestUtil { - public static final Log LOG = - LogFactory.getLog(MultithreadedTestUtil.class); + public static final Logger LOG = + LoggerFactory.getLogger(MultithreadedTestUtil.class); /** * TestContext is used to setup the multithreaded test runner. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestGenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestGenericTestUtils.java index c1d45ccd29..fb7bd22fed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestGenericTestUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestGenericTestUtils.java @@ -18,9 +18,6 @@ package org.apache.hadoop.test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.junit.Test; import org.slf4j.Logger; @@ -90,7 +87,7 @@ public String toString() { @Test(timeout = 10000) public void testLogCapturer() { - final Log log = LogFactory.getLog(TestGenericTestUtils.class); + final Logger log = LoggerFactory.getLogger(TestGenericTestUtils.class); LogCapturer logCapturer = LogCapturer.captureLogs(log); final String infoMessage = "info message"; // test get output message diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java index d6ae04d71b..4c0b965a97 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java @@ -18,13 +18,14 @@ package org.apache.hadoop.test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestJUnitSetup { - public static final Log LOG = LogFactory.getLog(TestJUnitSetup.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestJUnitSetup.class); @Test public void testJavaAssert() { diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java index 075ef69fd3..58935f24e8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java @@ -19,17 +19,18 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.AsyncDiskService; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A test for AsyncDiskService. */ public class TestAsyncDiskService extends TestCase { - public static final Log LOG = LogFactory.getLog(TestAsyncDiskService.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestAsyncDiskService.class); // Access by multiple threads from the ThreadPools in AsyncDiskService. volatile int count; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java index a38c3d764a..529887b297 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java @@ -28,21 +28,22 @@ import java.util.jar.JarFile; import java.util.jar.Manifest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests covering the classpath command-line utility. 
*/ public class TestClasspath { - private static final Log LOG = LogFactory.getLog(TestClasspath.class); + private static final Logger LOG = LoggerFactory.getLogger(TestClasspath + .class); private static final File TEST_DIR = GenericTestUtils.getTestDir( "TestClasspath"); private static final Charset UTF8 = Charset.forName("UTF-8"); @@ -75,7 +76,7 @@ public void setUp() { public void tearDown() { System.setOut(oldStdout); System.setErr(oldStderr); - IOUtils.cleanup(LOG, printStdout, printStderr); + IOUtils.cleanupWithLogger(LOG, printStdout, printStderr); assertTrue(FileUtil.fullyDelete(TEST_DIR)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java index 28389c27d5..3a4ebd5f7b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFindClass.java @@ -20,17 +20,18 @@ import java.io.ByteArrayOutputStream; import java.io.PrintStream; import junit.framework.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.FindClass; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the find class logic */ public class TestFindClass extends Assert { - private static final Log LOG = LogFactory.getLog(TestFindClass.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFindClass.class); public static final String LOG4J_PROPERTIES = "log4j.properties"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java index bd74855298..2c27b762c4 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java @@ -23,14 +23,15 @@ import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.IdentityHashStore; import org.apache.hadoop.util.IdentityHashStore.Visitor; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestIdentityHashStore { - private static final Log LOG = LogFactory.getLog(TestIdentityHashStore.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(TestIdentityHashStore.class.getName()); private static class Key { private final String name; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java index 671dd37cf4..3751253062 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java @@ -21,15 +21,16 @@ import java.util.Iterator; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.LightWeightGSet.LinkedElement; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Testing {@link LightWeightGSet} */ public class TestLightWeightGSet { - public static final Log LOG = LogFactory.getLog(TestLightWeightGSet.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestLightWeightGSet.class); private static ArrayList getRandomList(int length, int randomSeed) { Random random = new Random(randomSeed); diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java index 32500922a3..19f213d31a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java @@ -23,15 +23,16 @@ import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; /** Testing {@link LightWeightResizableGSet} */ public class TestLightWeightResizableGSet { - public static final Log LOG = LogFactory.getLog(TestLightWeightResizableGSet.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestLightWeightResizableGSet.class); private Random random = new Random(); private TestElement[] generateElements(int length) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java index 473c17738e..58874fdcdf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java @@ -20,16 +20,16 @@ import org.junit.Test; import static org.junit.Assert.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.crypto.OpensslCipher; import org.apache.hadoop.io.compress.Lz4Codec; import org.apache.hadoop.io.compress.SnappyCodec; import org.apache.hadoop.io.compress.zlib.ZlibFactory; import org.apache.hadoop.util.NativeCodeLoader; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestNativeCodeLoader { - static final Log LOG = LogFactory.getLog(TestNativeCodeLoader.class); + static final Logger LOG = LoggerFactory.getLogger(TestNativeCodeLoader.class); private static boolean requireTestJni() { String rtj = System.getProperty("require.test.libhadoop"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java index 73323eaa7a..a9fa4c64e9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java @@ -19,21 +19,22 @@ package org.apache.hadoop.util; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Assert; import org.junit.Assume; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestSignalLogger { - public static final Log LOG = LogFactory.getLog(TestSignalLogger.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestSignalLogger.class); @Test(timeout=60000) public void testInstall() throws Exception { Assume.assumeTrue(SystemUtils.IS_OS_UNIX); - SignalLogger.INSTANCE.register(LOG); + SignalLogger.INSTANCE.register(LogAdapter.create(LOG)); try { - SignalLogger.INSTANCE.register(LOG); + SignalLogger.INSTANCE.register(LogAdapter.create(LOG)); Assert.fail("expected IllegalStateException from double registration"); } catch (IllegalStateException e) { // fall through diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java index e45890cb04..baf4251c3e 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java @@ -29,13 +29,13 @@ import java.io.IOException; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.*; @@ -44,7 +44,7 @@ */ public class TestWinUtils { - private static final Log LOG = LogFactory.getLog(TestWinUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWinUtils.class); private static File TEST_DIR = GenericTestUtils.getTestDir( TestWinUtils.class.getSimpleName()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java index 1ef00856db..8c13b4f452 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java @@ -19,13 +19,13 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.SimpleTcpServer; import org.apache.hadoop.oncrpc.SimpleUdpServer; import org.apache.hadoop.portmap.PortmapMapping; import org.apache.hadoop.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.util.ExitUtil.terminate; @@ -37,7 +37,7 @@ * handle for requested directory and returns it to the client. 
*/ abstract public class MountdBase { - public static final Log LOG = LogFactory.getLog(MountdBase.class); + public static final Logger LOG = LoggerFactory.getLogger(MountdBase.class); private final RpcProgram rpcProgram; private int udpBoundPort; // Will set after server starts private int tcpBoundPort; // Will set after server starts @@ -63,7 +63,7 @@ private void startUDPServer() { try { udpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the UDP server.", e); + LOG.error("Failed to start the UDP server.", e); if (udpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_UDP, udpServer.getBoundPort()); @@ -82,7 +82,7 @@ private void startTCPServer() { try { tcpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the TCP server.", e); + LOG.error("Failed to start the TCP server.", e); if (tcpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpServer.getBoundPort()); @@ -103,7 +103,7 @@ public void start(boolean register) { rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort); rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort); } catch (Throwable e) { - LOG.fatal("Failed to register the MOUNT service.", e); + LOG.error("Failed to register the MOUNT service.", e); terminate(1, e); } } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java index a299ff08d4..3d5088d700 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import org.apache.commons.net.util.SubnetUtils.SubnetInfo; import 
org.apache.hadoop.conf.Configuration; @@ -35,6 +33,8 @@ import org.apache.hadoop.util.StringUtils; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides functionality for loading and checking the mapping @@ -64,7 +64,7 @@ public static synchronized NfsExports getInstance(Configuration conf) { return exports; } - public static final Log LOG = LogFactory.getLog(NfsExports.class); + public static final Logger LOG = LoggerFactory.getLogger(NfsExports.class); // only support IPv4 now private static final String IP_ADDRESS = diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java index bff5eecee7..5b327986f1 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java @@ -23,9 +23,9 @@ import java.security.NoSuchAlgorithmException; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a file handle use by the NFS clients. @@ -33,7 +33,7 @@ * on subsequent operations to reference the file. 
*/ public class FileHandle { - private static final Log LOG = LogFactory.getLog(FileHandle.class); + private static final Logger LOG = LoggerFactory.getLogger(FileHandle.class); private static final String HEXES = "0123456789abcdef"; private static final int HANDLE_LEN = 32; private byte[] handle; // Opaque handle diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java index 80faca56f6..00e6d9f70c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.nfs.nfs3; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.SimpleTcpServer; import org.apache.hadoop.portmap.PortmapMapping; import org.apache.hadoop.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.util.ExitUtil.terminate; @@ -32,7 +32,7 @@ * Only TCP server is supported and UDP is not supported. 
*/ public abstract class Nfs3Base { - public static final Log LOG = LogFactory.getLog(Nfs3Base.class); + public static final Logger LOG = LoggerFactory.getLogger(Nfs3Base.class); private final RpcProgram rpcProgram; private int nfsBoundPort; // Will set after server starts @@ -54,7 +54,7 @@ public void start(boolean register) { try { rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort); } catch (Throwable e) { - LOG.fatal("Failed to register the NFSv3 service.", e); + LOG.error("Failed to register the NFSv3 service.", e); terminate(1, e); } } @@ -67,7 +67,7 @@ private void startTCPServer() { try { tcpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the TCP server.", e); + LOG.error("Failed to start the TCP server.", e); if (tcpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpServer.getBoundPort()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java index 7ba37c9810..c8528ba4d5 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java @@ -19,18 +19,19 @@ import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple client that registers an RPC program with portmap. 
*/ public class RegistrationClient extends SimpleTcpClient { - public static final Log LOG = LogFactory.getLog(RegistrationClient.class); + public static final Logger LOG = + LoggerFactory.getLogger(RegistrationClient.class); public RegistrationClient(String host, int port, XDR request) { super(host, port, request); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java index aa4b948d58..04fddecea5 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java @@ -17,18 +17,18 @@ */ package org.apache.hadoop.oncrpc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.security.Credentials; import org.apache.hadoop.oncrpc.security.Verifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents an RPC message of type RPC call as defined in RFC 1831 */ public class RpcCall extends RpcMessage { public static final int RPC_VERSION = 2; - private static final Log LOG = LogFactory.getLog(RpcCall.class); - + private static final Logger LOG = LoggerFactory.getLogger(RpcCall.class); + public static RpcCall read(XDR xdr) { return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()), xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(), @@ -60,7 +60,7 @@ protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion, this.credentials = credential; this.verifier = verifier; if (LOG.isTraceEnabled()) { - LOG.trace(this); + LOG.trace(this.toString()); } validate(); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java index c541cd660b..5c059aa455 100644 --- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java @@ -23,8 +23,6 @@ import java.net.SocketAddress; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState; import org.apache.hadoop.oncrpc.security.Verifier; import org.apache.hadoop.oncrpc.security.VerifierNone; @@ -35,13 +33,15 @@ import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for writing RPC server programs based on RFC 1050. Extend this class * and implement {@link #handleInternal} to handle the requests received. */ public abstract class RpcProgram extends SimpleChannelUpstreamHandler { - static final Log LOG = LogFactory.getLog(RpcProgram.class); + static final Logger LOG = LoggerFactory.getLogger(RpcProgram.class); public static final int RPCB_PORT = 111; private final String program; private final String host; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java index cbc9943eaa..cebebd27d0 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java @@ -19,8 +19,6 @@ import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; @@ -29,6 +27,8 @@ import org.jboss.netty.channel.MessageEvent; import 
org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.frame.FrameDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public final class RpcUtil { /** @@ -63,7 +63,8 @@ public static FrameDecoder constructRpcFrameDecoder() { * each RPC client. */ static class RpcFrameDecoder extends FrameDecoder { - public static final Log LOG = LogFactory.getLog(RpcFrameDecoder.class); + public static final Logger LOG = + LoggerFactory.getLogger(RpcFrameDecoder.class); private ChannelBuffer currentFrame; @Override @@ -107,8 +108,8 @@ protected Object decode(ChannelHandlerContext ctx, Channel channel, * request into a RpcInfo instance. */ static final class RpcMessageParserStage extends SimpleChannelUpstreamHandler { - private static final Log LOG = LogFactory - .getLog(RpcMessageParserStage.class); + private static final Logger LOG = LoggerFactory + .getLogger(RpcMessageParserStage.class); @Override public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java index b72153a312..23b6682361 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java @@ -17,20 +17,21 @@ */ package org.apache.hadoop.oncrpc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple TCP 
based RPC client handler used by {@link SimpleTcpServer}. */ public class SimpleTcpClientHandler extends SimpleChannelHandler { - public static final Log LOG = LogFactory.getLog(SimpleTcpClient.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleTcpClient.class); protected final XDR request; public SimpleTcpClientHandler(XDR request) { diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java index bd48b15858..177fa3d80b 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java @@ -20,8 +20,6 @@ import java.net.InetSocketAddress; import java.util.concurrent.Executors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; @@ -30,12 +28,15 @@ import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Simple UDP server implemented using netty. 
*/ public class SimpleTcpServer { - public static final Log LOG = LogFactory.getLog(SimpleTcpServer.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleTcpServer.class); protected final int port; protected int boundPort = -1; // Will be set after server starts protected final SimpleChannelUpstreamHandler rpcProgram; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java index d691abad35..e65003ca64 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java @@ -20,20 +20,21 @@ import java.net.InetSocketAddress; import java.util.concurrent.Executors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.bootstrap.ConnectionlessBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.DatagramChannelFactory; import org.jboss.netty.channel.socket.nio.NioDatagramChannelFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Simple UDP server implemented based on netty. 
*/ public class SimpleUdpServer { - public static final Log LOG = LogFactory.getLog(SimpleUdpServer.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleUdpServer.class); private final int SEND_BUFFER_SIZE = 65536; private final int RECEIVE_BUFFER_SIZE = 65536; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java index fe4350be17..64edf485b2 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java @@ -18,16 +18,16 @@ package org.apache.hadoop.oncrpc.security; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base class for all credentials. Currently we only support 3 different types * of auth flavors: AUTH_NONE, AUTH_SYS, and RPCSEC_GSS. 
*/ public abstract class Credentials extends RpcAuthInfo { - public static final Log LOG = LogFactory.getLog(Credentials.class); + public static final Logger LOG = LoggerFactory.getLogger(Credentials.class); public static Credentials readFlavorAndCredentials(XDR xdr) { AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java index 93efba89a5..4a674e81de 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java @@ -19,13 +19,14 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class SecurityHandler { - public static final Log LOG = LogFactory.getLog(SecurityHandler.class); + public static final Logger LOG = + LoggerFactory.getLogger(SecurityHandler.class); public abstract String getUser(); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java index 7586fdad67..123999d5e1 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java @@ -22,8 +22,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.RpcUtil; import 
org.apache.hadoop.util.StringUtils; @@ -41,12 +39,14 @@ import org.jboss.netty.util.HashedWheelTimer; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Portmap service for binding RPC protocols. See RFC 1833 for details. */ final class Portmap { - private static final Log LOG = LogFactory.getLog(Portmap.class); + private static final Logger LOG = LoggerFactory.getLogger(Portmap.class); private static final int DEFAULT_IDLE_TIME_MILLISECONDS = 5000; private ConnectionlessBootstrap udpServer; @@ -65,7 +65,7 @@ public static void main(String[] args) { pm.start(DEFAULT_IDLE_TIME_MILLISECONDS, new InetSocketAddress(port), new InetSocketAddress(port)); } catch (Throwable e) { - LOG.fatal("Failed to start the server. Cause:", e); + LOG.error("Failed to start the server. Cause:", e); pm.shutdown(); System.exit(-1); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java index 67175d0640..0bc380f614 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java @@ -19,8 +19,6 @@ import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply; import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.RpcInfo; @@ -39,6 +37,8 @@ import org.jboss.netty.handler.timeout.IdleState; import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler; import org.jboss.netty.handler.timeout.IdleStateEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler { static final int PROGRAM = 100000; @@ 
-51,7 +51,8 @@ final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler { static final int PMAPPROC_DUMP = 4; static final int PMAPPROC_GETVERSADDR = 9; - private static final Log LOG = LogFactory.getLog(RpcProgramPortmap.class); + private static final Logger LOG = + LoggerFactory.getLogger(RpcProgramPortmap.class); private final ConcurrentHashMap map = new ConcurrentHashMap();