MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617878 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon 2014-08-14 04:53:36 +00:00
parent 886338ff80
commit 808bf8bac1
9 changed files with 51 additions and 14 deletions

View File

@@ -13,3 +13,4 @@ MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
MAPREDUCE-5976. native-task: should not fail to build if snappy is missing (Manu Zhang)
MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)
MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml (Binglin Chang via todd)
+MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)

View File

@@ -19,6 +19,8 @@
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
@@ -33,10 +35,9 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.nativetask.serde.*;
-import org.apache.log4j.Logger;
public class HadoopPlatform extends Platform {
-private static final Logger LOG = Logger.getLogger(HadoopPlatform.class);
+private static final Log LOG = LogFactory.getLog(HadoopPlatform.class);
public HadoopPlatform() throws IOException {
}
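For reference, the commons-logging idiom this patch standardizes on looks roughly like the following. This is a minimal illustrative sketch (the class name is hypothetical, not part of the patch): each class holds a static Log obtained from LogFactory, and the concrete backend, log4j in this build, is selected at runtime.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical example class (not from the patch) showing the commons-logging
// pattern used above in place of org.apache.log4j.Logger.
public class LoggingExample {
  // One static Log per class; LogFactory picks the underlying implementation.
  private static final Log LOG = LogFactory.getLog(LoggingExample.class);

  public static void main(String[] args) {
    LOG.info("platform initialized");        // routed to the configured appender
    LOG.debug("details only emitted when DEBUG is enabled");
  }
}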

View File

@@ -20,11 +20,12 @@
import java.io.IOException;
import java.util.ServiceLoader;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.nativetask.serde.INativeSerializer;
import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
-import org.apache.log4j.Logger;
/**
@@ -34,7 +35,7 @@
*/
public class Platforms {
-private static final Logger LOG = Logger.getLogger(Platforms.class);
+private static final Log LOG = LogFactory.getLog(Platforms.class);
private static final ServiceLoader<Platform> platforms = ServiceLoader.load(Platform.class);
public static void init(Configuration conf) throws IOException {

View File

@@ -19,6 +19,8 @@
import static org.junit.Assert.assertEquals;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,6 +36,7 @@
import org.junit.Test;
public class LargeKVCombinerTest {
+private static final Log LOG = LogFactory.getLog(LargeKVCombinerTest.class);
@Test
public void testLargeValueCombiner(){
@@ -57,7 +60,7 @@ public void testLargeValueCombiner(){
int max = i;
int min = Math.max(i / 4, max - 10);
System.out.println("===KV Size Test: min size: " + min + ", max size: " + max);
LOG.info("===KV Size Test: min size: " + min + ", max size: " + max);
normalConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
normalConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));

View File

@@ -23,6 +23,8 @@
import java.util.ArrayList;
import java.util.Arrays;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@
@RunWith(Parameterized.class)
public class KVTest {
+private static final Log LOG = LogFactory.getLog(KVTest.class);
private static Class<?>[] keyclasses = null;
private static Class<?>[] valueclasses = null;
private static String[] keyclassNames = null;
@@ -53,7 +57,7 @@ public class KVTest {
public static Iterable<Class<?>[]> data() {
final String valueclassesStr = nativekvtestconf
.get(TestConstants.NATIVETASK_KVTEST_VALUECLASSES);
-System.out.println(valueclassesStr);
+LOG.info(valueclassesStr);
valueclassNames = valueclassesStr.replaceAll("\\s", "").split(";");// delete
// " "
final ArrayList<Class<?>> tmpvalueclasses = new ArrayList<Class<?>>();
@@ -69,7 +73,7 @@ public static Iterable<Class<?>[]> data() {
}
valueclasses = tmpvalueclasses.toArray(new Class[tmpvalueclasses.size()]);
final String keyclassesStr = nativekvtestconf.get(TestConstants.NATIVETASK_KVTEST_KEYCLASSES);
-System.out.println(keyclassesStr);
+LOG.info(keyclassesStr);
keyclassNames = keyclassesStr.replaceAll("\\s", "").split(";");// delete
// " "
final ArrayList<Class<?>> tmpkeyclasses = new ArrayList<Class<?>>();

View File

@@ -21,6 +21,8 @@
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -32,6 +34,7 @@
import org.junit.Test;
public class LargeKVTest {
+private static final Log LOG = LogFactory.getLog(LargeKVTest.class);
@Test
public void testKeySize() {
@@ -69,7 +72,7 @@ public void runKVSizeTests(Class<?> keyClass, Class<?> valueClass) {
normalConf.set(TestConstants.NATIVETASK_KVSIZE_MIN, String.valueOf(min));
normalConf.set(TestConstants.NATIVETASK_KVSIZE_MAX, String.valueOf(max));
System.out.println("===KV Size Test: min size: " + min + ", max size: " + max + ", keyClass: "
LOG.info("===KV Size Test: min size: " + min + ", max size: " + max + ", keyClass: "
+ keyClass.getName() + ", valueClass: " + valueClass.getName());
final String nativeOutPut = runNativeLargeKVTest("Test Large Value Size:" + String.valueOf(i), keyClass,

View File

@@ -21,6 +21,8 @@
import java.util.HashMap;
import java.util.Random;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -40,6 +42,7 @@
public class TestInputFile {
+private static Log LOG = LogFactory.getLog(TestInputFile.class);
public static class KVSizeScope {
private static final int DefaultMinNum = 1;
@@ -120,8 +123,8 @@ public void createSequenceTestFile(String filepath, int base) throws Exception {
}
public void createSequenceTestFile(String filepath, int base, byte start) throws Exception {
System.out.println("create file " + filepath);
System.out.println(keyClsName + " " + valueClsName);
LOG.info("create file " + filepath);
LOG.info(keyClsName + " " + valueClsName);
Class<?> tmpkeycls, tmpvaluecls;
try {
tmpkeycls = Class.forName(keyClsName);

View File

@@ -19,9 +19,12 @@
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.nativetask.NativeMapOutputCollectorDelegator;
public class EnforceNativeOutputCollectorDelegator<K, V> extends NativeMapOutputCollectorDelegator<K, V> {
+private static final Log LOG = LogFactory.getLog(EnforceNativeOutputCollectorDelegator.class);
private boolean nativetaskloaded = false;
@Override
@@ -32,8 +35,7 @@ public void init(Context context)
nativetaskloaded = true;
} catch (final Exception e) {
nativetaskloaded = false;
System.err.println("load nativetask lib failed, Native-Task Delegation is disabled");
e.printStackTrace();
LOG.error("load nativetask lib failed, Native-Task Delegation is disabled", e);
}
}
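A side note on the change above: passing the caught Throwable as the second argument to LOG.error records the full stack trace through the logging framework, which is what the removed System.err.println plus e.printStackTrace pair attempted by hand. A minimal illustrative sketch (the class and the simulated failure are hypothetical, not from the patch):

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical example: log a failure message together with its exception
// instead of printing to System.err and calling printStackTrace().
public class ErrorLoggingExample {
  private static final Log LOG = LogFactory.getLog(ErrorLoggingExample.class);

  public static void main(String[] args) {
    try {
      throw new IOException("simulated failure loading the native library");
    } catch (final IOException e) {
      // Message and stack trace end up in the same log record.
      LOG.error("load nativetask lib failed, Native-Task Delegation is disabled", e);
    }
  }
}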

View File

@@ -0,0 +1,19 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# log4j configuration used during build and unit tests
+log4j.rootLogger=info,stdout
+log4j.threshhold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2} (%F:%M(%L)) - %m%n
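Usage note: with this file on the test classpath, the LOG calls added in the tests above are written to the console by the stdout ConsoleAppender at INFO and above; the pattern prints the timestamp (%d{ISO8601}), level (%-5p), thread ([%t]), the last two components of the logger name (%c{2}), the source location ((%F:%M(%L))) and the message (%m%n). If more detail is wanted from the native-task code while debugging a test, a package-level override could be appended, for example (an illustrative addition, not part of the patch):

log4j.logger.org.apache.hadoop.mapred.nativetask=DEBUG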