From f4d31cd029f8369b8e41de2667f6ba2bd95ca701 Mon Sep 17 00:00:00 2001
From: Christopher Douglas
Date: Fri, 26 Mar 2010 08:25:00 +0000
Subject: [PATCH] HADOOP-6566. Add methods supporting, enforcing narrower
 permissions on local daemon directories. Contributed by Arun Murthy and
 Luke Lu

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@927728 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                    |   3 +
 .../hadoop/fs/permission/FsPermission.java     |  11 +-
 .../org/apache/hadoop/util/DiskChecker.java    |  77 ++++++++++
 .../org/apache/hadoop/test/MockitoMaker.java   | 132 ++++++++++++++++++
 .../apache/hadoop/util/TestDiskChecker.java    | 127 +++++++++++++++++
 5 files changed, 349 insertions(+), 1 deletion(-)
 create mode 100644 src/test/core/org/apache/hadoop/test/MockitoMaker.java
 create mode 100644 src/test/core/org/apache/hadoop/util/TestDiskChecker.java

diff --git a/CHANGES.txt b/CHANGES.txt
index 73eb794433..155a3a0e18 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -204,6 +204,9 @@ Trunk (unreleased changes)
     HADOOP-6646. Move HarfileSystem out of Hadoop Common.
     (mahadev)
 
+    HADOOP-6566. Add methods supporting, enforcing narrower permissions on
+    local daemon directories. (Arun Murthy and Luke Lu via cdouglas)
+
   OPTIMIZATIONS
 
     HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).
diff --git a/src/java/org/apache/hadoop/fs/permission/FsPermission.java b/src/java/org/apache/hadoop/fs/permission/FsPermission.java
index 50cf61816a..572441c5b6 100644
--- a/src/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/src/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -90,6 +90,15 @@ public FsPermission(FsPermission other) {
     this.otheraction = other.otheraction;
   }
 
+  /**
+   * Construct by given mode, either in octal or symbolic format.
+   * @param mode mode as a string, either in octal or symbolic format
+   * @throws IllegalArgumentException if mode is invalid
+   */
+  public FsPermission(String mode) {
+    this(new UmaskParser(mode).getUMask());
+  }
+
   /** Return user {@link FsAction}. */
   public FsAction getUserAction() {return useraction;}
 
@@ -199,7 +208,7 @@ public static FsPermission getUMask(Configuration conf) {
       if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL))
         umask = Integer.parseInt(confUmask); // Evaluate as decimal value
       else
-        umask = new UmaskParser(confUmask).getUMask();
+        return new FsPermission(confUmask);
     } catch(IllegalArgumentException iae) {
       // Provide more explanation for user-facing message
       String type = iae instanceof NumberFormatException ? "decimal"
"decimal" diff --git a/src/java/org/apache/hadoop/util/DiskChecker.java b/src/java/org/apache/hadoop/util/DiskChecker.java index 4c471dbce8..6b8652b7f9 100644 --- a/src/java/org/apache/hadoop/util/DiskChecker.java +++ b/src/java/org/apache/hadoop/util/DiskChecker.java @@ -21,6 +21,12 @@ import java.io.File; import java.io.IOException; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.LocalFileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.permission.FsPermission; + /** * Class that provides utility functions for checking disk problem */ @@ -68,6 +74,11 @@ public static boolean mkdirsWithExistsCheck(File dir) { (canonDir.mkdir() || canonDir.exists())); } + /** + * Create the directory if it doesn't exist and + * @param dir + * @throws DiskErrorException + */ public static void checkDir(File dir) throws DiskErrorException { if (!mkdirsWithExistsCheck(dir)) throw new DiskErrorException("can not create directory: " @@ -86,4 +97,70 @@ public static void checkDir(File dir) throws DiskErrorException { + dir.toString()); } + /** + * Create the directory or check permissions if it already exists. + * + * The semantics of mkdirsWithExistsAndPermissionCheck method is different + * from the mkdirs method provided in the Sun's java.io.File class in the + * following way: + * While creating the non-existent parent directories, this method checks for + * the existence of those directories if the mkdir fails at any point (since + * that directory might have just been created by some other process). + * If both mkdir() and the exists() check fails for any seemingly + * non-existent directory, then we signal an error; Sun's mkdir would signal + * an error (return false) if a directory it is attempting to create already + * exists or the mkdir fails. + * + * @param localFS local filesystem + * @param dir directory to be created or checked + * @param expected expected permission + * @throws IOException + */ + public static void mkdirsWithExistsAndPermissionCheck( + LocalFileSystem localFS, Path dir, FsPermission expected) + throws IOException { + File directory = localFS.pathToFile(dir); + boolean created = false; + + if (!directory.exists()) + created = mkdirsWithExistsCheck(directory); + + if (created || !localFS.getFileStatus(dir).getPermission().equals(expected)) + localFS.setPermission(dir, expected); + } + + /** + * Create the local directory if necessary, check permissions and also ensure + * it can be read from and written into. 
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */
+  public static void checkDir(LocalFileSystem localFS, Path dir,
+                              FsPermission expected)
+      throws DiskErrorException, IOException {
+    mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
+
+    FileStatus stat = localFS.getFileStatus(dir);
+    FsPermission actual = stat.getPermission();
+
+    if (!stat.isDir())
+      throw new DiskErrorException("not a directory: "+ dir.toString());
+
+    FsAction user = actual.getUserAction();
+    if (!user.implies(FsAction.READ))
+      throw new DiskErrorException("directory is not readable: "
+                                   + dir.toString());
+
+    if (!user.implies(FsAction.WRITE))
+      throw new DiskErrorException("directory is not writable: "
+                                   + dir.toString());
+
+    if (!user.implies(FsAction.EXECUTE))
+      throw new DiskErrorException("directory is not listable: "
+                                   + dir.toString());
+  }
 }
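The sketch below is not part of the patch; it only illustrates how a daemon-side caller might use the new checkDir overload added above. The class name, directory path, and the 700 permission are hypothetical choices for the example.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class LocalDirSetup {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    LocalFileSystem localFS = FileSystem.getLocal(conf);

    // Hypothetical daemon-local directory; any local path would do.
    Path dir = new Path("/tmp/hadoop-example/local");

    // Narrow, owner-only permission for the example (rwx------),
    // using the FsPermission(String) constructor added by this patch.
    FsPermission perm = new FsPermission("700");

    try {
      // Creates the directory if needed, sets the expected permission when it
      // was just created or differs, then verifies the path is a directory
      // that the owner can read, write, and list.
      DiskChecker.checkDir(localFS, dir, perm);
    } catch (DiskErrorException e) {
      // The directory exists but cannot be used as required.
      System.err.println("Unusable local directory: " + e.getMessage());
    }
  }
}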
diff --git a/src/test/core/org/apache/hadoop/test/MockitoMaker.java b/src/test/core/org/apache/hadoop/test/MockitoMaker.java
new file mode 100644
index 0000000000..28c2011c26
--- /dev/null
+++ b/src/test/core/org/apache/hadoop/test/MockitoMaker.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import static org.mockito.Mockito.*;
+
+/**
+ * Helper class to create one-liner stubs, so that instead of: <pre>
+ * SomeType someDescriptiveMock = mock(SomeType.class);
+ * when(someDescriptiveMock.someMethod()).thenReturn(someValue);</pre>
+ * <p>You can now do: <pre>
+ * SomeType someDescriptiveMock = make(stub(SomeType.class)
+ *     .returning(someValue).from.someMethod());</pre>
+ */
+public class MockitoMaker {
+
+  /**
+   * Create a mock object from a mocked method call.
+   *
+   * @param <T> type of mocked object
+   * @param methodCall for mocked object
+   * @return mocked object
+   */
+  @SuppressWarnings("unchecked")
+  public static <T> T make(Object methodCall) {
+    StubBuilder<T> sb = StubBuilder.current();
+    when(methodCall).thenReturn(sb.firstReturn, sb.laterReturns);
+    return (T) StubBuilder.current().from;
+  }
+
+  /**
+   * Create a stub builder of a mocked object.
+   *
+   * @param <T> type of the target object to be mocked
+   * @param target class of the target object to be mocked
+   * @return the stub builder of the mocked object
+   */
+  public static <T> StubBuilder<T> stub(Class<T> target) {
+    return new StubBuilder<T>(mock(target));
+  }
+
+  /**
+   * Builder class for stubs
+   * @param <T> type of the object to be mocked
+   */
+  public static class StubBuilder<T> {
+
+    /**
+     * The target mock object
+     */
+    public final T from;
+
+    // We want to be able to use this even when the tests are run in parallel.
+    @SuppressWarnings("rawtypes")
+    private static final ThreadLocal<StubBuilder> tls =
+        new ThreadLocal<StubBuilder>() {
+          @Override protected StubBuilder initialValue() {
+            return new StubBuilder();
+          }
+        };
+
+    private Object firstReturn = null;
+    private Object[] laterReturns = {};
+
+    /**
+     * Default constructor for the initial stub builder
+     */
+    public StubBuilder() {
+      this.from = null;
+    }
+
+    /**
+     * Construct a stub builder with a mock instance
+     *
+     * @param mockInstance the mock object
+     */
+    public StubBuilder(T mockInstance) {
+      tls.set(this);
+      this.from = mockInstance;
+    }
+
+    /**
+     * Get the current stub builder from thread local
+     *
+     * @param <T>
+     * @return the stub builder of the mocked object
+     */
+    @SuppressWarnings("unchecked")
+    public static <T> StubBuilder<T> current() {
+      return tls.get();
+    }
+
+    /**
+     * Set the return value for the current stub builder
+     *
+     * @param value the return value
+     * @return the stub builder
+     */
+    public StubBuilder<T> returning(Object value) {
+      this.firstReturn = value;
+      return this;
+    }
+
+    /**
+     * Set the return values for the current stub builder
+     *
+     * @param value the first return value
+     * @param values the return values for later invocations
+     * @return the stub builder
+     */
+    public StubBuilder<T> returning(Object value, Object... values) {
+      this.firstReturn = value;
+      this.laterReturns = values;
+      return this;
+    }
+  }
+}
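The class javadoc above shows the intended one-liner form. Here is a small standalone sketch, not part of the patch, that mirrors how the tests below stub a FileStatus; the class name and the chosen FileStatus/FsPermission types are illustrative only.

import static org.apache.hadoop.test.MockitoMaker.*;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.permission.FsPermission;

public class MockitoMakerExample {
  public static void main(String[] args) {
    // One-liner stub: a mock FileStatus whose getPermission() returns 700.
    FileStatus stat = make(stub(FileStatus.class)
        .returning(new FsPermission("700")).from.getPermission());

    // Equivalent to mock(FileStatus.class) followed by
    // when(stat.getPermission()).thenReturn(new FsPermission("700")).
    System.out.println(stat.getPermission()); // symbolic form, e.g. rwx------
  }
}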
diff --git a/src/test/core/org/apache/hadoop/util/TestDiskChecker.java b/src/test/core/org/apache/hadoop/util/TestDiskChecker.java
new file mode 100644
index 0000000000..65fb712213
--- /dev/null
+++ b/src/test/core/org/apache/hadoop/util/TestDiskChecker.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import java.io.*;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+import static org.mockito.Mockito.*;
+
+import static org.apache.hadoop.test.MockitoMaker.*;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.util.DiskChecker.DiskErrorException;
+
+public class TestDiskChecker {
+  final FsPermission defaultPerm = new FsPermission("755");
+  final FsPermission invalidPerm = new FsPermission("000");
+
+  @Test public void testMkdirs_dirExists() throws Throwable {
+    _mkdirs(true, defaultPerm, defaultPerm);
+  }
+
+  @Test public void testMkdirs_noDir() throws Throwable {
+    _mkdirs(false, defaultPerm, defaultPerm);
+  }
+
+  @Test public void testMkdirs_dirExists_badUmask() throws Throwable {
+    _mkdirs(true, defaultPerm, invalidPerm);
+  }
+
+  @Test public void testMkdirs_noDir_badUmask() throws Throwable {
+    _mkdirs(false, defaultPerm, invalidPerm);
+  }
+
+  private void _mkdirs(boolean exists, FsPermission before, FsPermission after)
+      throws Throwable {
+    File localDir = make(stub(File.class).returning(exists).from.exists());
+    when(localDir.mkdir()).thenReturn(true);
+    Path dir = mock(Path.class); // use default stubs
+    LocalFileSystem fs = make(stub(LocalFileSystem.class)
+        .returning(localDir).from.pathToFile(dir));
+    FileStatus stat = make(stub(FileStatus.class)
+        .returning(after).from.getPermission());
+    when(fs.getFileStatus(dir)).thenReturn(stat);
+
+    try {
+      DiskChecker.mkdirsWithExistsAndPermissionCheck(fs, dir, before);
+
+      if (!exists)
+        verify(fs).setPermission(dir, before);
+      else {
+        verify(fs).getFileStatus(dir);
+        verify(stat).getPermission();
+      }
+    }
+    catch (DiskErrorException e) {
+      if (before != after)
+        assertTrue(e.getMessage().startsWith("Incorrect permission"));
+    }
+  }
+
+  @Test public void testCheckDir_normal() throws Throwable {
+    _checkDirs(true, new FsPermission("755"), true);
+  }
+
+  @Test public void testCheckDir_notDir() throws Throwable {
+    _checkDirs(false, new FsPermission("000"), false);
+  }
+
+  @Test public void testCheckDir_notReadable() throws Throwable {
+    _checkDirs(true, new FsPermission("000"), false);
+  }
+
+  @Test public void testCheckDir_notWritable() throws Throwable {
+    _checkDirs(true, new FsPermission("444"), false);
+  }
+
+  @Test public void testCheckDir_notListable() throws Throwable {
+    _checkDirs(true, new FsPermission("666"), false); // not listable
+  }
+
+  private void _checkDirs(boolean isDir, FsPermission perm, boolean success)
+      throws Throwable {
+    File localDir = make(stub(File.class).returning(true).from.exists());
+    when(localDir.mkdir()).thenReturn(true);
+    Path dir = mock(Path.class);
+    LocalFileSystem fs = make(stub(LocalFileSystem.class)
+        .returning(localDir).from.pathToFile(dir));
+    FileStatus stat = make(stub(FileStatus.class)
+        .returning(perm).from.getPermission());
+    when(stat.isDir()).thenReturn(isDir);
+    when(fs.getFileStatus(dir)).thenReturn(stat);
+
+    try {
+      DiskChecker.checkDir(fs, dir, perm);
+
+      verify(stat).isDir();
+      verify(fs, times(2)).getFileStatus(dir);
+      verify(stat, times(2)).getPermission();
+      assertTrue("checkDir success", success);
+    }
+    catch (DiskErrorException e) {
+      assertFalse("checkDir success", success);
+      e.printStackTrace();
+    }
+    System.out.println("checkDir success: "+ success);
+  }
+}
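The tests above build permissions with the new FsPermission(String) constructor added earlier in this patch. A brief sketch, not part of the patch and with an illustrative class name, of what the octal form of that constructor yields; per the constructor's javadoc a symbolic form is accepted as well, but the octal form is what the tests use.

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class FsPermissionExample {
  public static void main(String[] args) {
    // Octal mode string, as used by TestDiskChecker.
    FsPermission perm = new FsPermission("755");

    // The parsed actions can be inspected individually.
    System.out.println(perm.getUserAction());                           // ALL
    System.out.println(perm.getGroupAction());                          // READ_EXECUTE
    System.out.println(perm.getOtherAction().implies(FsAction.WRITE));  // false

    // An invalid mode string is rejected, as documented by the constructor.
    try {
      new FsPermission("948");
    } catch (IllegalArgumentException e) {
      System.out.println("invalid mode: " + e.getMessage());
    }
  }
}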