MAPREDUCE-7001. Moving logging APIs over to slf4j in hadoop-mapreduce-client-shuffle. Contributed by Jinjiang Ling.
commit e4c220ee4f
parent ffee10b68e
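The change is mechanical across all five files: each commons-logging Log/LogFactory pair becomes the slf4j Logger/LoggerFactory equivalent. A minimal, self-contained sketch of the pattern (the class name and log message here are illustrative, not part of the commit):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ShuffleLoggingExample {
  // Replaces the commons-logging form:
  //   private static final Log LOG = LogFactory.getLog(ShuffleLoggingExample.class);
  private static final Logger LOG =
      LoggerFactory.getLogger(ShuffleLoggingExample.class);

  public static void main(String[] args) {
    // slf4j's {} placeholders defer message construction until the
    // level is actually enabled, unlike manual string concatenation.
    LOG.info("shuffle logging example started with {} args", args.length);
  }
}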
FadvisedChunkedFile.java:

@@ -22,11 +22,11 @@
 import java.io.IOException;
 import java.io.RandomAccessFile;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_DONTNEED;
 
@@ -34,7 +34,8 @@
 
 public class FadvisedChunkedFile extends ChunkedFile {
 
-  private static final Log LOG = LogFactory.getLog(FadvisedChunkedFile.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FadvisedChunkedFile.class);
 
   private final boolean manageOsCache;
   private final int readaheadLength;
FadvisedFileRegion.java:

@@ -25,11 +25,11 @@
 import java.nio.channels.FileChannel;
 import java.nio.channels.WritableByteChannel;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_DONTNEED;
 
@@ -39,7 +39,8 @@
 
 public class FadvisedFileRegion extends DefaultFileRegion {
 
-  private static final Log LOG = LogFactory.getLog(FadvisedFileRegion.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(FadvisedFileRegion.class);
 
   private final boolean manageOsCache;
   private final int readaheadLength;
ShuffleHandler.java:

@@ -54,8 +54,6 @@
 
 import javax.crypto.SecretKey;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputByteBuffer;
@@ -134,6 +132,7 @@
 import org.jboss.netty.util.HashedWheelTimer;
 import org.jboss.netty.util.Timer;
 import org.eclipse.jetty.http.HttpHeader;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
@@ -148,9 +147,10 @@
 
 public class ShuffleHandler extends AuxiliaryService {
 
-  private static final Log LOG = LogFactory.getLog(ShuffleHandler.class);
-  private static final Log AUDITLOG =
-      LogFactory.getLog(ShuffleHandler.class.getName()+".audit");
+  private static final org.slf4j.Logger LOG =
+      LoggerFactory.getLogger(ShuffleHandler.class);
+  private static final org.slf4j.Logger AUDITLOG =
+      LoggerFactory.getLogger(ShuffleHandler.class.getName()+".audit");
   public static final String SHUFFLE_MANAGE_OS_CACHE = "mapreduce.shuffle.manage.os.cache";
   public static final boolean DEFAULT_SHUFFLE_MANAGE_OS_CACHE = true;
 
@@ -775,7 +775,8 @@ private void removeJobShuffleInfo(JobID jobId) throws IOException {
   }
 
   private static class LevelDBLogger implements Logger {
-    private static final Log LOG = LogFactory.getLog(LevelDBLogger.class);
+    private static final org.slf4j.Logger LOG =
+        LoggerFactory.getLogger(LevelDBLogger.class);
 
     @Override
     public void log(String message) {
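Note the fully qualified org.slf4j.Logger in ShuffleHandler: the nested LevelDBLogger implements a different interface that is also named Logger (leveldb's), so an unqualified import org.slf4j.Logger would clash with it. A minimal sketch of that collision, using a hypothetical stand-in for the leveldb interface:

// Hypothetical stand-in for the non-slf4j Logger interface that
// LevelDBLogger implements; only the name collision matters here.
interface Logger {
  void log(String message);
}

class LevelDBLoggerSketch implements Logger {
  // "Logger" unqualified would resolve to the interface above, so the
  // slf4j type has to be written out in full, as in the commit.
  private static final org.slf4j.Logger LOG =
      org.slf4j.LoggerFactory.getLogger(LevelDBLoggerSketch.class);

  @Override
  public void log(String message) {
    LOG.info(message);
  }
}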
TestFadvisedFileRegion.java:

@@ -25,18 +25,18 @@
 import java.nio.channels.WritableByteChannel;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestFadvisedFileRegion {
   private final int FILE_SIZE = 16*1024*1024;
-  private static final Log LOG =
-      LogFactory.getLog(TestFadvisedFileRegion.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestFadvisedFileRegion.class);
 
   @Test(timeout = 100000)
   public void testCustomShuffleTransfer() throws IOException {
@@ -66,7 +66,7 @@ public void testCustomShuffleTransfer() throws IOException {
     try{
       out.write(initBuff);
     } finally {
-      IOUtils.cleanup(LOG, out);
+      IOUtils.cleanupWithLogger(LOG, out);
     }
 
 
@@ -106,9 +106,9 @@ public void testCustomShuffleTransfer() throws IOException {
       if (fileRegion != null) {
        fileRegion.releaseExternalResources();
       }
-      IOUtils.cleanup(LOG, target);
-      IOUtils.cleanup(LOG, targetFile);
-      IOUtils.cleanup(LOG, inputFile);
+      IOUtils.cleanupWithLogger(LOG, target);
+      IOUtils.cleanupWithLogger(LOG, targetFile);
+      IOUtils.cleanupWithLogger(LOG, inputFile);
     }
 
     //Read the target file and verify that copy is done correctly
@@ -123,7 +123,7 @@ public void testCustomShuffleTransfer() throws IOException {
        Assert.assertEquals(initBuff[position+i], buff[i]);
       }
     } finally {
-      IOUtils.cleanup(LOG, in);
+      IOUtils.cleanupWithLogger(LOG, in);
     }
 
     //delete files and folders
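The test changes also replace IOUtils.cleanup(LOG, ...) with IOUtils.cleanupWithLogger(LOG, ...): Hadoop's cleanup overload accepts a commons-logging Log, so once the field becomes an slf4j Logger the call sites must move to the Logger-typed variant. A small usage sketch (the file path is illustrative):

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CleanupWithLoggerExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(CleanupWithLoggerExample.class);

  public static void main(String[] args) throws IOException {
    FileInputStream in = new FileInputStream("/tmp/example.dat");
    try {
      // ... use the stream ...
    } finally {
      // Closes each Closeable, logging (not rethrowing) any IOException;
      // the slf4j-typed counterpart of IOUtils.cleanup(Log, Closeable...).
      IOUtils.cleanupWithLogger(LOG, in);
    }
  }
}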
TestShuffleHandler.java:

@@ -48,8 +48,6 @@
 import java.util.zip.CheckedOutputStream;
 import java.util.zip.Checksum;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -99,10 +97,13 @@
 import org.mockito.stubbing.Answer;
 import org.mockito.Mockito;
 import org.eclipse.jetty.http.HttpHeader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestShuffleHandler {
   static final long MiB = 1024 * 1024;
-  private static final Log LOG = LogFactory.getLog(TestShuffleHandler.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestShuffleHandler.class);
   private static final File ABS_LOG_DIR = GenericTestUtils.getTestDir(
       TestShuffleHandler.class.getSimpleName() + "LocDir");
 