MAPREDUCE-6947. Moving logging APIs over to slf4j in hadoop-mapreduce-examples. Contributed by Gergely Novák.
commit 2018538fdb
parent 56ef5279c1
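Every hunk below applies the same mechanical substitution: the commons-logging Log/LogFactory pair is replaced by SLF4J's Logger/LoggerFactory, and the logger field gains final where it was missing (TestTeraSort). A minimal sketch of the pattern, assuming only that slf4j-api is on the classpath; the class name Example is a placeholder, not a class from this commit:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Example {
      // Before (commons-logging):
      //   private static final Log LOG = LogFactory.getLog(Example.class);
      // After (SLF4J):
      private static final Logger LOG = LoggerFactory.getLogger(Example.class);

      public static void main(String[] args) {
        // {} placeholders are formatted only if INFO is enabled
        LOG.info("Logger ready for {}", Example.class.getSimpleName());
      }
    }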
BaileyBorweinPlouffe.java

@@ -29,8 +29,6 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,6 +49,8 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 
@@ -83,7 +83,8 @@ public class BaileyBorweinPlouffe extends Configured implements Tool {
   private static final String DIGIT_SIZE_PROPERTY = NAME + ".digit.size";
   private static final String DIGIT_PARTS_PROPERTY = NAME + ".digit.parts";
 
-  private static final Log LOG = LogFactory.getLog(BaileyBorweinPlouffe.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BaileyBorweinPlouffe.class);
 
   /** Mapper class computing digits of Pi. */
   public static class BbpMapper extends
DBCountPageView.java

@@ -29,8 +29,6 @@
 import java.sql.Statement;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.LongWritable;
@@ -49,6 +47,8 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.hsqldb.server.Server;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is a demonstrative program, which uses DBInputFormat for reading
@@ -77,7 +77,8 @@
  */
 public class DBCountPageView extends Configured implements Tool {
 
-  private static final Log LOG = LogFactory.getLog(DBCountPageView.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DBCountPageView.class);
 
   private Connection connection;
   private boolean initialized = false;
DancingLinks.java

@@ -19,8 +19,8 @@
 
 import java.util.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A generic solver for tile laying problems using Knuth's dancing link
@@ -35,8 +35,7 @@
  * The type parameter ColumnName is the class of application's column names.
  */
 public class DancingLinks<ColumnName> {
-  private static final Log LOG =
-      LogFactory.getLog(DancingLinks.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(DancingLinks.class);
 
   /**
    * A cell in the table with up/down and left/right links that form doubly
DistSum.java

@@ -28,8 +28,6 @@
 import java.util.Map;
 import java.util.concurrent.Callable;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.examples.pi.math.Summation;
@@ -55,6 +53,8 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The main class for computing sums using map/reduce jobs.
@@ -66,7 +66,7 @@
  * a mix-type job may be executed on either side.
  */
 public final class DistSum extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(DistSum.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DistSum.class);
 
   private static final String NAME = DistSum.class.getSimpleName();
   private static final String N_PARTS = "mapreduce.pi." + NAME + ".nParts";
TeraGen.java

@@ -25,8 +25,6 @@
 import java.util.List;
 import java.util.zip.Checksum;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -49,6 +47,8 @@
 import org.apache.hadoop.util.PureJavaCrc32;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Generate the official GraySort input data set.
@@ -66,7 +66,7 @@
  * <b>bin/hadoop jar hadoop-*-examples.jar teragen 10000000000 in-dir</b>
  */
 public class TeraGen extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(TeraGen.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TeraGen.class);
 
   public enum Counters {CHECKSUM}
 
TeraOutputFormat.java

@@ -21,8 +21,6 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -38,12 +36,15 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.security.TokenCache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An output format that writes the key and value appended together.
  */
 public class TeraOutputFormat extends FileOutputFormat<Text,Text> {
-  private static final Log LOG = LogFactory.getLog(TeraOutputFormat.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TeraOutputFormat.class);
   private OutputCommitter committer = null;
 
   /**
TeraScheduler.java

@@ -21,17 +21,18 @@
 import java.io.*;
 import java.util.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Charsets;
 
 class TeraScheduler {
-  private static final Log LOG = LogFactory.getLog(TeraScheduler.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TeraScheduler.class);
   private Split[] splits;
   private List<Host> hosts = new ArrayList<Host>();
   private int slotsPerHost;
TeraSort.java

@@ -23,8 +23,6 @@
 import java.io.PrintStream;
 import java.net.URI;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -38,6 +36,8 @@
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Generates the sampled split points, launches the job, and waits for it to
@@ -47,7 +47,7 @@
 * <b>bin/hadoop jar hadoop-*-examples.jar terasort in-dir out-dir</b>
 */
 public class TeraSort extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(TeraSort.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TeraSort.class);
 
   /**
    * A partitioner that splits text keys into roughly equal partitions
TestTeraSort.java

@@ -20,8 +20,6 @@
 import java.io.File;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
@@ -29,12 +27,14 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 public class TestTeraSort extends HadoopTestCase {
-  private static Log LOG = LogFactory.getLog(TestTeraSort.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestTeraSort.class);
 
   public TestTeraSort()
       throws IOException {
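The commit only swaps the logger declarations; call sites are left unchanged. The practical payoff of SLF4J is parameterized messages, which skip string construction entirely when the level is disabled. A sketch of the idiom for context; the class and message below are illustrative, not taken from this commit:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggingIdiom {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingIdiom.class);

      void report(String host, int slots) {
        // commons-logging style: the message string is built even if INFO is off
        //   LOG.info("host " + host + " has " + slots + " slots");
        // SLF4J style: {} placeholders defer formatting until the level check passes
        LOG.info("host {} has {} slots", host, slots);
      }
    }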