HADOOP-14587. Use GenericTestUtils.setLogLevel when available in hadoop-common. Contributed by Wenxin He.

Akira Ajisaka 2017-07-08 02:54:24 +09:00
parent 8fc5dcc2a1
commit 7cd095272c
24 changed files with 104 additions and 59 deletions
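
The change follows one pattern across the touched test classes: drop the cast through org.apache.commons.logging.impl.Log4JLogger and call the GenericTestUtils helper with an org.slf4j.event.Level instead. A minimal before/after sketch of that pattern (the class name is illustrative; FileSystem.LOG stands in for any public commons-logging Log field used by these tests):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.event.Level;

public class SetLogLevelMigrationSketch {
  static void oldStyle() {
    // Before: cast the commons-logging Log to its log4j implementation
    // and set an org.apache.log4j.Level directly.
    ((org.apache.commons.logging.impl.Log4JLogger) FileSystem.LOG).getLogger()
        .setLevel(org.apache.log4j.Level.DEBUG);
  }

  static void newStyle() {
    // After: let the test helper resolve the underlying logger and
    // translate the slf4j level.
    GenericTestUtils.setLogLevel(FileSystem.LOG, Level.DEBUG);
  }
}

Note that org.slf4j.event.Level has no ALL constant, so call sites that previously used Level.ALL now use Level.TRACE, as the hunks below show.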

View File

@ -20,7 +20,6 @@
import java.io.IOException;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -30,6 +29,7 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertIsFile;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.event.Level;
/**
* <p>

View File

@ -23,6 +23,7 @@
import java.util.List;
import java.util.StringTokenizer;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.apache.hadoop.fs.permission.FsPermission;
@ -32,6 +33,7 @@
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.event.Level;
import static org.apache.hadoop.fs.FileContextTestHelper.*;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
@ -61,8 +63,7 @@ public abstract class FileContextPermissionBase {
{
try {
((org.apache.commons.logging.impl.Log4JLogger)FileSystem.LOG).getLogger()
.setLevel(org.apache.log4j.Level.DEBUG);
GenericTestUtils.setLogLevel(FileSystem.LOG, Level.DEBUG);
}
catch(Exception e) {
System.out.println("Cannot change log level\n"

View File

@ -23,10 +23,12 @@
import java.util.Arrays;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.event.Level;
/**
* <p>
@ -48,8 +50,7 @@ public abstract class FileContextUtilBase {
{
try {
((org.apache.commons.logging.impl.Log4JLogger)FileSystem.LOG).getLogger()
.setLevel(org.apache.log4j.Level.DEBUG);
GenericTestUtils.setLogLevel(FileSystem.LOG, Level.DEBUG);
} catch(Exception e) {
System.out.println("Cannot change log level\n"
+ StringUtils.stringifyException(e));

View File

@ -24,18 +24,18 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.BeforeClass;
import org.slf4j.event.Level;
/**
* This class tests the FileStatus API.
*/
public class TestListFiles {
static {
GenericTestUtils.setLogLevel(FileSystem.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FileSystem.LOG, Level.TRACE);
}
static final long seed = 0xDEADBEEFL;

View File

@ -21,10 +21,10 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.apache.log4j.Level;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import java.io.IOException;
import java.util.ArrayList;

View File

@ -24,12 +24,11 @@
import java.util.Collections;
import java.util.UUID;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
import org.apache.hadoop.ha.ActiveStandbyElector.State;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
import org.apache.log4j.Level;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooKeeper;
@ -39,6 +38,7 @@
import org.mockito.Mockito;
import com.google.common.primitives.Ints;
import org.slf4j.event.Level;
/**
* Test for {@link ActiveStandbyElector} using real zookeeper.
@ -47,8 +47,7 @@ public class TestActiveStandbyElectorRealZK extends ClientBaseWithFixes {
static final int NUM_ELECTORS = 2;
static {
((Log4JLogger)ActiveStandbyElector.LOG).getLogger().setLevel(
Level.ALL);
GenericTestUtils.setLogLevel(ActiveStandbyElector.LOG, Level.TRACE);
}
static final String PARENT_DIR = "/" + UUID.randomUUID();

View File

@ -21,18 +21,18 @@
import java.net.InetSocketAddress;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.ha.SshFenceByTcpPort.Args;
import org.apache.log4j.Level;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assume;
import org.junit.Test;
import org.slf4j.event.Level;
public class TestSshFenceByTcpPort {
static {
((Log4JLogger)SshFenceByTcpPort.LOG).getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(SshFenceByTcpPort.LOG, Level.TRACE);
}
private static String TEST_FENCING_HOST = System.getProperty(

View File

@ -22,7 +22,6 @@
import java.security.NoSuchAlgorithmException;
import com.google.common.base.Supplier;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
@ -30,7 +29,6 @@
import org.apache.hadoop.ha.MiniZKFCCluster.DummyZKFC;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
@ -41,6 +39,7 @@
import org.junit.Test;
import org.junit.rules.Timeout;
import org.mockito.Mockito;
import org.slf4j.event.Level;
public class TestZKFailoverController extends ClientBaseWithFixes {
private Configuration conf;
@ -71,7 +70,7 @@ public class TestZKFailoverController extends ClientBaseWithFixes {
"digest:" + DIGEST_USER_HASH + ":rwcda";
static {
((Log4JLogger)ActiveStandbyElector.LOG).getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(ActiveStandbyElector.LOG, Level.TRACE);
}
@Before

View File

@ -18,21 +18,21 @@
package org.apache.hadoop.io.serializer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertNotNull;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.Writable;
import org.apache.log4j.Level;
import org.slf4j.event.Level;
public class TestSerializationFactory {
static {
((Log4JLogger) SerializationFactory.LOG).getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(SerializationFactory.LOG, Level.TRACE);
}
static Configuration conf;

View File

@ -26,9 +26,9 @@
import java.util.Arrays;
import java.util.Enumeration;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.net.NetUtils;
@ -43,8 +43,7 @@
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.slf4j.event.Level;
/**
* MiniRPCBenchmark measures time to establish an RPC connection
@ -255,9 +254,9 @@ public MiniProtocol run() throws IOException {
}
static void setLoggingLevel(Level level) {
LogManager.getLogger(Server.class.getName()).setLevel(level);
((Log4JLogger)Server.AUDITLOG).getLogger().setLevel(level);
LogManager.getLogger(Client.class.getName()).setLevel(level);
GenericTestUtils.setLogLevel(Server.LOG, level);
GenericTestUtils.setLogLevel(Server.AUDITLOG, level);
GenericTestUtils.setLogLevel(Client.LOG, level);
}
/**
@ -370,7 +369,7 @@ public static void main(String[] args) throws Exception {
useDelegationToken = args[3].equalsIgnoreCase("useToken");
Level l = Level.ERROR;
if(args.length > 4)
l = Level.toLevel(args[4]);
l = GenericTestUtils.toLevel(args[4]);
MiniRPCBenchmark mb = new MiniRPCBenchmark(l);
long elapsedTime = 0;

View File

@ -60,7 +60,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@ -85,7 +84,6 @@
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
@ -98,6 +96,7 @@
import com.google.common.base.Supplier;
import com.google.common.primitives.Bytes;
import com.google.common.primitives.Ints;
import org.slf4j.event.Level;
/** Unit tests for IPC. */
public class TestIPC {
@ -864,7 +863,7 @@ public void run() {
@Test(timeout=30000)
public void testConnectionIdleTimeouts() throws Exception {
((Log4JLogger)Server.LOG).getLogger().setLevel(Level.DEBUG);
GenericTestUtils.setLogLevel(Server.LOG, Level.DEBUG);
final int maxIdle = 1000;
final int cleanupInterval = maxIdle*3/4; // stagger cleanups
final int killMax = 3;

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.ipc;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Level;
import org.junit.Test;
import org.slf4j.event.Level;
/**
* Test {@link MiniRPCBenchmark}

View File

@ -50,11 +50,11 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.MetricsAsserts;
import org.apache.hadoop.test.MockitoUtil;
import org.apache.log4j.Level;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import org.slf4j.event.Level;
import javax.net.SocketFactory;
import java.io.Closeable;

View File

@ -22,7 +22,6 @@
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@ -36,7 +35,7 @@
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.token.*;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.log4j.Level;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
@ -44,6 +43,7 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.event.Level;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
@ -186,12 +186,12 @@ static String getQOPNames (QualityOfProtection[] qops){
}
static {
((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger) SecurityUtil.LOG).getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(SecurityUtil.LOG, Level.TRACE);
}
public static class BadTokenSecretManager extends TestTokenSecretManager {

View File

@ -25,16 +25,16 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
import org.slf4j.event.Level;
public class TestGroupFallback {
public static final Log LOG = LogFactory.getLog(TestGroupFallback.class);
@Test
public void testGroupShell() throws Exception {
Logger.getRootLogger().setLevel(Level.DEBUG);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
"org.apache.hadoop.security.ShellBasedUnixGroupsMapping");
@ -50,7 +50,7 @@ public void testGroupShell() throws Exception {
@Test
public void testNetgroupShell() throws Exception {
Logger.getRootLogger().setLevel(Level.DEBUG);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
"org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping");
@ -69,7 +69,7 @@ public void testGroupWithFallback() throws Exception {
LOG.info("running 'mvn -Pnative -DTestGroupFallback clear test' will " +
"test the normal path and 'mvn -DTestGroupFallback clear test' will" +
" test the fall back functionality");
Logger.getRootLogger().setLevel(Level.DEBUG);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
"org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback");
@ -88,7 +88,7 @@ public void testNetgroupWithFallback() throws Exception {
LOG.info("running 'mvn -Pnative -DTestGroupFallback clear test' will " +
"test the normal path and 'mvn -DTestGroupFallback clear test' will" +
" test the fall back functionality");
Logger.getRootLogger().setLevel(Level.DEBUG);
GenericTestUtils.setRootLogLevel(Level.DEBUG);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
"org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMappingWithFallback");

View File

@ -23,9 +23,9 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.PlatformName;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Test;
import org.slf4j.event.Level;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;

View File

@ -33,7 +33,6 @@
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -41,6 +40,7 @@
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;

View File

@ -26,7 +26,6 @@
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -34,6 +33,7 @@
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLEngine;

View File

@ -32,7 +32,6 @@
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
@ -42,6 +41,7 @@
import org.junit.Test;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletHolder;
import org.slf4j.event.Level;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;

View File

@ -140,6 +140,20 @@ public static void setLogLevel(Log log, Level level) {
setLogLevel((Log4JLogger) log, level);
}
/**
* A helper used in the log4j2 migration to accept the legacy
* org.apache.commons.logging APIs.
* <p>
* It will be removed once the migration is complete.
*
* @param log a commons-logging Log whose underlying logger is adjusted
* @param level the slf4j level to set
*/
@Deprecated
public static void setLogLevel(Log log, org.slf4j.event.Level level) {
setLogLevel(log, Level.toLevel(level.toString()));
}
/**
* @deprecated
* use {@link #setLogLevel(org.slf4j.Logger, org.slf4j.event.Level)} instead
@ -172,6 +186,22 @@ public static void setLogLevel(org.slf4j.Logger logger,
setLogLevel(toLog4j(logger), Level.toLevel(level.toString()));
}
public static void setRootLogLevel(org.slf4j.event.Level level) {
setLogLevel(LogManager.getRootLogger(), Level.toLevel(level.toString()));
}
public static org.slf4j.event.Level toLevel(String level) {
return toLevel(level, org.slf4j.event.Level.DEBUG);
}
public static org.slf4j.event.Level toLevel(
String level, org.slf4j.event.Level defaultLevel) {
try {
return org.slf4j.event.Level.valueOf(level);
} catch (IllegalArgumentException e) {
return defaultLevel;
}
}
/**
* Extracts the name of the method where the invocation has happened
* @return String name of the invoking method

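The helpers added to GenericTestUtils above back this migration. A hedged usage sketch, assuming hadoop-common's test-jar is on the classpath (the class and field names are illustrative and not part of this change):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.event.Level;

public class GenericTestUtilsLevelSketch {
  private static final Log LOG = LogFactory.getLog(GenericTestUtilsLevelSketch.class);

  public static void main(String[] args) {
    // Deprecated bridge overload: accepts a legacy commons-logging Log
    // together with an slf4j Level.
    GenericTestUtils.setLogLevel(LOG, Level.DEBUG);

    // Raise the root logger's level for every logger at once.
    GenericTestUtils.setRootLogLevel(Level.TRACE);

    // Parse a level name; unknown names fall back to DEBUG,
    // or to an explicitly supplied default.
    Level byName = GenericTestUtils.toLevel("INFO");                         // INFO
    Level fallback = GenericTestUtils.toLevel("NonExistLevel");              // DEBUG
    Level explicit = GenericTestUtils.toLevel("NonExistLevel", Level.TRACE); // TRACE
    System.out.println(byName + " " + fallback + " " + explicit);
  }
}
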
View File

@ -27,7 +27,9 @@
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
import org.slf4j.event.Level;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@ -151,4 +153,12 @@ public Boolean get() {
assertExceptionContains(GenericTestUtils.ERROR_INVALID_ARGUMENT, e);
}
}
@Test
public void testToLevel() throws Throwable {
assertEquals(Level.INFO, toLevel("INFO"));
assertEquals(Level.DEBUG, toLevel("NonExistLevel"));
assertEquals(Level.INFO, toLevel("INFO", Level.TRACE));
assertEquals(Level.TRACE, toLevel("NonExistLevel", Level.TRACE));
}
}

View File

@ -27,10 +27,11 @@
import java.util.zip.CRC32;
import java.util.zip.Checksum;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.log4j.Level;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.event.Level;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Performance tests to compare performance of Crc32|Crc32C implementations
@ -176,8 +177,8 @@ public DataChecksum.Type crcType() {
crcs.add(Crc32.Native.class);
}
crcs.add(Crc32.NativeC.class);
((Log4JLogger)LogFactory.getLog(NativeCodeLoader.class))
.getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(getLogger(NativeCodeLoader.class),
Level.TRACE);
}
}

View File

@ -47,6 +47,12 @@
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>

View File

@ -28,8 +28,7 @@
import org.apache.hadoop.oncrpc.RpcUtil.RpcFrameDecoder;
import org.apache.hadoop.oncrpc.security.CredentialsNone;
import org.apache.hadoop.oncrpc.security.VerifierNone;
import org.apache.log4j.Level;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.test.GenericTestUtils;
import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
@ -38,11 +37,12 @@
import org.jboss.netty.channel.ChannelHandlerContext;
import org.junit.Test;
import org.mockito.Mockito;
import org.slf4j.event.Level;
public class TestFrameDecoder {
static {
((Log4JLogger) RpcProgram.LOG).getLogger().setLevel(Level.ALL);
GenericTestUtils.setLogLevel(RpcProgram.LOG, Level.TRACE);
}
private static int resultSize;