HADOOP-8756. Fix SEGV when libsnappy is in java.library.path but not LD_LIBRARY_PATH. Contributed by Colin Patrick McCabe

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1393243 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2012-10-03 00:06:39 +00:00
parent f8f79a02aa
commit fc54f57c59
10 changed files with 91 additions and 127 deletions

CHANGES.txt

@@ -310,6 +310,9 @@ Release 2.0.3-alpha - Unreleased
HADOOP-8616. ViewFS configuration requires a trailing slash. (Sandy Ryza
via atm)
HADOOP-8756. Fix SEGV when libsnappy is in java.library.path but
not LD_LIBRARY_PATH. (Colin Patrick McCabe via eli)
Release 2.0.2-alpha - 2012-09-07
INCOMPATIBLE CHANGES

CMakeLists.txt

@@ -123,6 +123,7 @@ add_dual_library(hadoop
${D}/security/JniBasedUnixGroupsMapping.c
${D}/security/JniBasedUnixGroupsNetgroupMapping.c
${D}/security/getGroup.c
${D}/util/NativeCodeLoader.c
${D}/util/NativeCrc32.c
${D}/util/bulk_crc32.c
)

config.h.cmake

@@ -2,7 +2,6 @@
#define CONFIG_H
#cmakedefine HADOOP_ZLIB_LIBRARY "@HADOOP_ZLIB_LIBRARY@"
#cmakedefine HADOOP_RUNAS_HOME "@HADOOP_RUNAS_HOME@"
#cmakedefine HADOOP_SNAPPY_LIBRARY "@HADOOP_SNAPPY_LIBRARY@"
#cmakedefine HAVE_SYNC_FILE_RANGE
#cmakedefine HAVE_POSIX_FADVISE

org/apache/hadoop/io/compress/SnappyCodec.java

@@ -24,7 +24,6 @@
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.snappy.LoadSnappy;
import org.apache.hadoop.io.compress.snappy.SnappyCompressor;
import org.apache.hadoop.io.compress.snappy.SnappyDecompressor;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -34,11 +33,6 @@
* This class creates snappy compressors/decompressors.
*/
public class SnappyCodec implements Configurable, CompressionCodec {
static {
LoadSnappy.isLoaded();
}
Configuration conf;
/**
@@ -63,11 +57,26 @@ public Configuration getConf() {
/**
* Are the native snappy libraries loaded & initialized?
*
* @return true if loaded & initialized, otherwise false
*/
public static void checkNativeCodeLoaded() {
if (!NativeCodeLoader.buildSupportsSnappy()) {
throw new RuntimeException("native snappy library not available: " +
"this version of libhadoop was built without " +
"snappy support.");
}
if (!SnappyCompressor.isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available: " +
"SnappyCompressor has not been loaded.");
}
if (!SnappyDecompressor.isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available: " +
"SnappyDecompressor has not been loaded.");
}
}
public static boolean isNativeCodeLoaded() {
return LoadSnappy.isLoaded() && NativeCodeLoader.isNativeCodeLoaded();
return SnappyCompressor.isNativeCodeLoaded() &&
SnappyDecompressor.isNativeCodeLoaded();
}
/**
@@ -97,9 +106,7 @@ public CompressionOutputStream createOutputStream(OutputStream out)
public CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
throws IOException {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
@@ -117,10 +124,7 @@ public CompressionOutputStream createOutputStream(OutputStream out,
*/
@Override
public Class<? extends Compressor> getCompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
return SnappyCompressor.class;
}
@@ -131,9 +135,7 @@ public Class<? extends Compressor> getCompressorType() {
*/
@Override
public Compressor createCompressor() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);
@@ -167,10 +169,7 @@ public CompressionInputStream createInputStream(InputStream in)
public CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
throws IOException {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
return new BlockDecompressorStream(in, decompressor, conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT));
@@ -183,10 +182,7 @@ public CompressionInputStream createInputStream(InputStream in,
*/
@Override
public Class<? extends Decompressor> getDecompressorType() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
return SnappyDecompressor.class;
}
@@ -197,9 +193,7 @@ public Class<? extends Decompressor> getDecompressorType() {
*/
@Override
public Decompressor createDecompressor() {
if (!isNativeCodeLoaded()) {
throw new RuntimeException("native snappy library not available");
}
checkNativeCodeLoaded();
int bufferSize = conf.getInt(
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT);

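Taken together, the SnappyCodec changes above split the availability check in two: checkNativeCodeLoaded() throws a RuntimeException naming the exact missing piece (libhadoop built without snappy, or the compressor/decompressor failing to initialize), while isNativeCodeLoaded() stays a boolean probe. A minimal sketch of how calling code might use the probe before committing to the codec; the class below is illustrative and not part of this patch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.SnappyCodec;

public class SnappyAvailabilityProbe {
  public static void main(String[] args) {
    // Probe first: returns false instead of failing later inside
    // createOutputStream()/createCompressor(), which now throw through
    // checkNativeCodeLoaded() when native snappy support is missing.
    if (!SnappyCodec.isNativeCodeLoaded()) {
      System.err.println("Native snappy support is not available; "
          + "configure a different CompressionCodec.");
      return;
    }
    SnappyCodec codec = new SnappyCodec();
    codec.setConf(new Configuration());
    System.out.println("Snappy codec ready, default extension: "
        + codec.getDefaultExtension());
  }
}
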
org/apache/hadoop/io/compress/snappy/LoadSnappy.java (deleted)

@@ -1,70 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress.snappy;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.NativeCodeLoader;
/**
* Determines if Snappy native library is available and loads it if available.
*/
public class LoadSnappy {
private static final Log LOG = LogFactory.getLog(LoadSnappy.class.getName());
private static boolean AVAILABLE = false;
private static boolean LOADED = false;
static {
try {
System.loadLibrary("snappy");
LOG.warn("Snappy native library is available");
AVAILABLE = true;
} catch (UnsatisfiedLinkError ex) {
//NOP
}
boolean hadoopNativeAvailable = NativeCodeLoader.isNativeCodeLoaded();
LOADED = AVAILABLE && hadoopNativeAvailable;
if (LOADED) {
LOG.info("Snappy native library loaded");
} else {
LOG.warn("Snappy native library not loaded");
}
}
/**
* Returns if Snappy native library is loaded.
*
* @return <code>true</code> if Snappy native library is loaded,
* <code>false</code> if not.
*/
public static boolean isAvailable() {
return AVAILABLE;
}
/**
* Returns if Snappy native library is loaded.
*
* @return <code>true</code> if Snappy native library is loaded,
* <code>false</code> if not.
*/
public static boolean isLoaded() {
return LOADED;
}
}

org/apache/hadoop/io/compress/snappy/SnappyCompressor.java

@@ -26,6 +26,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.util.NativeCodeLoader;
/**
* A {@link Compressor} based on the snappy compression algorithm.
@@ -51,22 +52,24 @@ public class SnappyCompressor implements Compressor {
private long bytesRead = 0L;
private long bytesWritten = 0L;
private static boolean nativeSnappyLoaded = false;
static {
if (LoadSnappy.isLoaded()) {
// Initialize the native library
if (NativeCodeLoader.isNativeCodeLoaded() &&
NativeCodeLoader.buildSupportsSnappy()) {
try {
initIDs();
nativeSnappyLoaded = true;
} catch (Throwable t) {
// Ignore failure to load/initialize snappy
LOG.warn(t.toString());
LOG.error("failed to load SnappyCompressor", t);
}
} else {
LOG.error("Cannot load " + SnappyCompressor.class.getName() +
" without snappy library!");
}
}
public static boolean isNativeCodeLoaded() {
return nativeSnappyLoaded;
}
/**
* Creates a new compressor.
*

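The static block above is the heart of the Java-side fix: native availability is decided once, at class-load time, behind both NativeCodeLoader checks, and recorded in a flag instead of being allowed to surface later as a crash. A hedged, generic sketch of the same pattern for some other native-backed class; the class and its initIDs() hook are hypothetical, only the NativeCodeLoader calls are real:

import org.apache.hadoop.util.NativeCodeLoader;

public class NativeBackedExample {
  private static boolean nativeLoaded = false;

  static {
    // Short-circuit matters: buildSupportsSnappy() is itself a native
    // method, so it must not be called unless libhadoop is loaded.
    if (NativeCodeLoader.isNativeCodeLoaded() &&
        NativeCodeLoader.buildSupportsSnappy()) {
      try {
        initIDs(); // native; may throw UnsatisfiedLinkError
        nativeLoaded = true;
      } catch (Throwable t) {
        // Record the failure instead of letting it escape the initializer.
        System.err.println("failed to initialize native support: " + t);
      }
    }
  }

  /** Callers test this flag instead of blindly triggering native calls. */
  public static boolean isNativeCodeLoaded() {
    return nativeLoaded;
  }

  // Hypothetical JNI hook; with no native implementation behind it, a call
  // here throws UnsatisfiedLinkError, which the catch above absorbs.
  private static native void initIDs();
}
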
org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java

@@ -25,6 +25,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.util.NativeCodeLoader;
/**
* A {@link Decompressor} based on the snappy compression algorithm.
@@ -47,21 +48,24 @@ public class SnappyDecompressor implements Decompressor {
private int userBufOff = 0, userBufLen = 0;
private boolean finished;
private static boolean nativeSnappyLoaded = false;
static {
if (LoadSnappy.isLoaded()) {
// Initialize the native library
if (NativeCodeLoader.isNativeCodeLoaded() &&
NativeCodeLoader.buildSupportsSnappy()) {
try {
initIDs();
nativeSnappyLoaded = true;
} catch (Throwable t) {
// Ignore failure to load/initialize snappy
LOG.warn(t.toString());
LOG.error("failed to load SnappyDecompressor", t);
}
} else {
LOG.error("Cannot load " + SnappyDecompressor.class.getName() +
" without snappy library!");
}
}
public static boolean isNativeCodeLoaded() {
return nativeSnappyLoaded;
}
/**
* Creates a new compressor.
*

org/apache/hadoop/util/NativeCodeLoader.java

@@ -74,6 +74,11 @@ public static boolean isNativeCodeLoaded() {
return nativeCodeLoaded;
}
/**
* Returns true only if this build was compiled with support for snappy.
*/
public static native boolean buildSupportsSnappy();
/**
* Return if native hadoop libraries, if present, can be used for this job.
* @param conf configuration

org/apache/hadoop/util/NativeCodeLoader.c (new file)

@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "config.h"
#include <jni.h>
JNIEXPORT jboolean JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_buildSupportsSnappy
(JNIEnv *env, jclass clazz)
{
#ifdef HADOOP_SNAPPY_LIBRARY
return JNI_TRUE;
#else
return JNI_FALSE;
#endif
}

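This new C file is the native half of the buildSupportsSnappy() declaration added to NativeCodeLoader.java above: the exported symbol follows JNI's Java_<package>_<class>_<method> naming convention, and its return value is fixed at build time by whether HADOOP_SNAPPY_LIBRARY was #cmakedefine'd into config.h. A small diagnostic sketch that reports both halves of the check; the class is hypothetical and not part of the patch:

import org.apache.hadoop.util.NativeCodeLoader;

public class NativeSnappyDiagnostic {
  public static void main(String[] args) {
    boolean libhadoopLoaded = NativeCodeLoader.isNativeCodeLoaded();
    // Ask about snappy support only once libhadoop itself is loaded;
    // otherwise the native buildSupportsSnappy() call would throw
    // UnsatisfiedLinkError.
    boolean builtWithSnappy =
        libhadoopLoaded && NativeCodeLoader.buildSupportsSnappy();
    System.out.println("libhadoop loaded:  " + libhadoopLoaded);
    System.out.println("built with snappy: " + builtWithSnappy);
  }
}
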
org/apache/hadoop/io/compress/TestCodec.java

@@ -54,7 +54,6 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.snappy.LoadSnappy;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@@ -103,14 +102,9 @@ public void testBZip2Codec() throws IOException {
@Test
public void testSnappyCodec() throws IOException {
if (LoadSnappy.isAvailable()) {
if (LoadSnappy.isLoaded()) {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
}
else {
Assert.fail("Snappy native available but Hadoop native not");
}
if (SnappyCodec.isNativeCodeLoaded()) {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
}
}
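
With the LoadSnappy checks gone, the reworked testSnappyCodec() simply does nothing when native snappy support is absent, so the test passes silently. A hedged alternative, assuming JUnit 4's org.junit.Assume is on the test classpath, is to report the test as skipped instead; this sketch is illustrative only and not what the patch does:

import static org.junit.Assume.assumeTrue;

import org.junit.Test;
import org.apache.hadoop.io.compress.SnappyCodec;

public class TestSnappyCodecSkipSketch {
  @Test
  public void testSnappyCodec() {
    // Marks the test as skipped rather than passed when native snappy
    // support is unavailable, so the gap stays visible in test reports.
    assumeTrue(SnappyCodec.isNativeCodeLoaded());
    // ...then exercise the codec here, e.g. with the same codecTest(...)
    // round-trip helper that TestCodec uses...
  }
}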