From 50af34f778f9fde11ef5d209a1ba5a432cc9b48a Mon Sep 17 00:00:00 2001
From: Colin McCabe
Date: Sat, 14 Sep 2013 00:05:29 +0000
Subject: [PATCH] HDFS-5201. NativeIO: consolidate getrlimit into
 NativeIO#getMemlockLimit. (Contributed by Colin Patrick McCabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1523153 13f79535-47bb-0310-9956-ffa450edef68
---
 .../apache/hadoop/io/nativeio/NativeIO.java   | 52 +++++--------
 .../org/apache/hadoop/io/nativeio/NativeIO.c  | 51 +++++++-----------
 .../hadoop/io/nativeio/TestNativeIO.java      |  2 +-
 .../hadoop-hdfs/CHANGES-HDFS-4949.txt         |  4 ++
 .../hadoop/hdfs/server/datanode/DataNode.java |  2 +-
 .../hadoop/hdfs/TestDatanodeConfig.java       |  6 ++-
 6 files changed, 44 insertions(+), 73 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index 96193eed03..3d6ce7b6c0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -272,44 +272,6 @@ public static void munlock(ByteBuffer buffer, long len)
       munlock_native(buffer, len);
     }
 
-    /**
-     * Resource limit types copied from <sys/resource.h>
-     */
-    private static class ResourceLimit {
-      public static final int RLIMIT_CPU = 0;
-      public static final int RLIMIT_FSIZE = 1;
-      public static final int RLIMIT_DATA = 2;
-      public static final int RLIMIT_STACK = 3;
-      public static final int RLIMIT_CORE = 4;
-      public static final int RLIMIT_RSS = 5;
-      public static final int RLIMIT_NPROC = 6;
-      public static final int RLIMIT_NOFILE = 7;
-      public static final int RLIMIT_MEMLOCK = 8;
-      public static final int RLIMIT_AS = 9;
-      public static final int RLIMIT_LOCKS = 10;
-      public static final int RLIMIT_SIGPENDING = 11;
-      public static final int RLIMIT_MSGQUEUE = 12;
-      public static final int RLIMIT_NICE = 13;
-      public static final int RLIMIT_RTPRIO = 14;
-      public static final int RLIMIT_RTTIME = 15;
-      public static final int RLIMIT_NLIMITS = 16;
-    }
-
-    static native String getrlimit(int limit) throws NativeIOException;
-    /**
-     * Returns the soft limit on the number of bytes that may be locked by the
-     * process in bytes (RLIMIT_MEMLOCK).
-     *
-     * See the getrlimit(2) man page for more information
-     *
-     * @return maximum amount of locked memory in bytes
-     */
-    public static long getMemlockLimit() throws IOException {
-      assertCodeLoaded();
-      String strLimit = getrlimit(ResourceLimit.RLIMIT_MEMLOCK);
-      return Long.parseLong(strLimit);
-    }
-
     /** Linux only methods used for getOwner() implementation */
     private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
     private static native String getUserName(long uid) throws IOException;
@@ -563,6 +525,20 @@ public static boolean isAvailable() {
   /** Initialize the JNI method ID and class ID cache */
   private static native void initNative();
 
+  /**
+   * Get the maximum number of bytes that can be locked into memory at any
+   * given point.
+   *
+   * @return 0 if no bytes can be locked into memory;
+   *         Long.MAX_VALUE if there is no limit;
+   *         The number of bytes that can be locked into memory otherwise.
+   */
+  public static long getMemlockLimit() {
+    return isAvailable() ? getMemlockLimit0() : 0;
+  }
+
+  private static native long getMemlockLimit0();
+
   private static class CachedUid {
     final long timestamp;
     final String username;
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
index 56f0f71eb5..59a5f47622 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
@@ -16,8 +16,6 @@
  * limitations under the License.
  */
 
-#define _GNU_SOURCE
-
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_nativeio_NativeIO.h"
 
@@ -28,6 +26,7 @@
 #include 
 #include 
 #include 
+#include 
 #include 
 #include 
 #include 
@@ -414,36 +413,6 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munlock_1native(
   }
 }
 
-/**
- * public static native String getrlimit(
- *   int resource);
- *
- * The "00024" in the function name is an artifact of how JNI encodes
- * special characters. U+0024 is '$'.
- */
-JNIEXPORT jstring JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getrlimit(
-  JNIEnv *env, jclass clazz,
-  jint resource)
-{
-  jstring ret = NULL;
-
-  struct rlimit rlim;
-  int rc = getrlimit((int)resource, &rlim);
-  if (rc != 0) {
-    throw_ioe(env, errno);
-    goto cleanup;
-  }
-
-  // Convert soft limit into a string
-  char limit[17];
-  int len = snprintf(&limit, 17, "%d", rlim.rlim_cur);
-  ret = (*env)->NewStringUTF(env,&limit);
-
-cleanup:
-  return ret;
-}
-
 #ifdef __FreeBSD__
 static int toFreeBSDFlags(int flags)
 {
@@ -1008,6 +977,24 @@ done:
 #endif
 }
 
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_getMemlockLimit0(
+JNIEnv *env, jclass clazz)
+{
+#ifdef WINDOWS
+  return 0;
+#else
+  struct rlimit rlim;
+  int rc = getrlimit(RLIMIT_MEMLOCK, &rlim);
+  if (rc != 0) {
+    throw_ioe(env, errno);
+    return 0;
+  }
+  return (rlim.rlim_cur == RLIM_INFINITY) ?
+      INT64_MAX : rlim.rlim_cur;
+#endif
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index 917532e4bf..144cb9c2c4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -583,6 +583,6 @@ public void testMlock() throws Exception {
   @Test(timeout=10000)
   public void testGetMemlockLimit() throws Exception {
     assumeTrue(NativeIO.isAvailable());
-    NativeIO.POSIX.getMemlockLimit();
+    NativeIO.getMemlockLimit();
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
index 27f8c10dfe..06efddb276 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-4949.txt
@@ -46,3 +46,7 @@ HDFS-4949 (Unreleased)
       cache report. (Contributed by Colin Patrick McCabe)
 
     HDFS-5195. Prevent passing null pointer to mlock and munlock. (cnauroth)
+
+    HDFS-5201. NativeIO: consolidate getrlimit into NativeIO#getMemlockLimit
+    (Contributed by Colin Patrick McCabe)
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 65a1c922b9..778820b0a8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -745,7 +745,7 @@ void startDataNode(Configuration conf,
             " size (%s) is greater than zero and native code is not available.",
             DFS_DATANODE_MAX_LOCKED_MEMORY_KEY));
       }
-      long ulimit = NativeIO.POSIX.getMemlockLimit();
+      long ulimit = NativeIO.getMemlockLimit();
       if (dnConf.maxLockedMemory > ulimit) {
         throw new RuntimeException(String.format(
           "Cannot start datanode because the configured max locked memory" +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeConfig.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeConfig.java
index f2166b7411..4bdcfee635 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeConfig.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeConfig.java
@@ -113,11 +113,15 @@ private static String makeURI(String scheme, String host, String path)
   @Test(timeout=60000)
   public void testMemlockLimit() throws Exception {
     assumeTrue(NativeIO.isAvailable());
-    final long memlockLimit = NativeIO.POSIX.getMemlockLimit();
+    final long memlockLimit = NativeIO.getMemlockLimit();
     Configuration conf = cluster.getConfiguration(0);
     // Try starting the DN with limit configured to the ulimit
     conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
         memlockLimit);
+    if (memlockLimit == Long.MAX_VALUE) {
+      // Can't increase the memlock limit past the maximum.
+      return;
+    }
     DataNode dn = null;
     dn = DataNode.createDataNode(new String[]{}, conf);
     dn.shutdown();
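
Usage sketch of the consolidated API (illustrative only, not part of the diff;
the class name and the configured value below are hypothetical, while the
NativeIO.getMemlockLimit() call and its 0 / Long.MAX_VALUE contract are the
ones introduced by this patch):

    import org.apache.hadoop.io.nativeio.NativeIO;

    public class MemlockLimitCheck {
      public static void main(String[] args) {
        // Hypothetical configured cache size, in bytes.
        final long maxLockedMemory = 64L * 1024 * 1024;

        // Per the new contract: 0 means nothing can be locked (including when
        // native code is unavailable, or on Windows); Long.MAX_VALUE means
        // RLIMIT_MEMLOCK is RLIM_INFINITY; otherwise the soft limit in bytes.
        final long ulimit = NativeIO.getMemlockLimit();

        if (maxLockedMemory > ulimit) {
          // Same failure mode the DataNode uses at startup.
          throw new RuntimeException(String.format(
              "Configured max locked memory (%d) exceeds the memlock ulimit (%d)",
              maxLockedMemory, ulimit));
        }
        System.out.println("memlock limit ok: " + ulimit);
      }
    }

Because the check now happens against a plain long rather than a parsed string,
callers no longer need to catch NumberFormatException or handle a null string
from the old POSIX.getrlimit() path.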