From 9c2f4f634db124282f114a44b2e7dfc899693c1d Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Mon, 21 Nov 2011 07:01:29 +0000
Subject: [PATCH] HDFS-2514. Link resolution bug for intermediate symlinks
 with relative targets. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1204370 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 ++
 .../java/org/apache/hadoop/util/RunJar.java   | 13 +++++-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 ++
 .../hadoop/hdfs/protocol/ClientProtocol.java  | 18 ++++----
 .../protocol/UnresolvedPathException.java     | 42 +++++++++++--------
 .../hdfs/server/namenode/FSNamesystem.java    |  6 ++-
 .../hadoop/hdfs/server/namenode/INode.java    | 10 +++--
 .../hdfs/server/namenode/INodeDirectory.java  | 21 +++++-----
 .../server/namenode/NameNodeRpcServer.java    |  4 --
 .../apache/hadoop/fs/TestFcHdfsSymlink.java   | 15 +++++--
 10 files changed, 86 insertions(+), 49 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index f023cfe0a1..1babc9a2ef 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -118,6 +118,9 @@ Release 0.23.1 - Unreleased
     HADOOP-7802. Hadoop scripts unconditionally source
     "$bin"/../libexec/hadoop-config.sh. (Bruno Mahé via tomwhite)
 
+    HADOOP-6614. RunJar should provide more diags when it can't create
+    a temp file. (Jonathan Hsieh via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index 46993e16aa..a785cacc34 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -149,7 +149,18 @@ public static void main(String[] args) throws Throwable {
     File tmpDir = new File(new Configuration().get("hadoop.tmp.dir"));
     ensureDirectory(tmpDir);
 
-    final File workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
+    final File workDir;
+    try {
+      workDir = File.createTempFile("hadoop-unjar", "", tmpDir);
+    } catch (IOException ioe) {
+      // If the user has insufficient perms to write to tmpDir, the default
+      // "Permission denied" message doesn't specify a filename.
+      System.err.println("Error creating temp dir in hadoop.tmp.dir "
+                         + tmpDir + " due to " + ioe.getMessage());
+      System.exit(-1);
+      return;
+    }
+
     if (!workDir.delete()) {
       System.err.println("Delete failed for " + workDir);
       System.exit(-1);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 65931cd82f..7e7be88a29 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1934,6 +1934,9 @@ Release 0.22.0 - Unreleased
     HDFS-2002. Incorrect computation of needed blocks in getTurnOffTip().
     (Plamen Jeliazkov via shv)
 
+    HDFS-2514. Link resolution bug for intermediate symlinks with
+    relative targets. (eli)
+
 Release 0.21.1 - Unreleased
 
     HDFS-1466. TestFcHdfsSymlink relies on /tmp/test not existing. (eli)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index b9f8bec13d..5dbae4e168 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -828,9 +828,9 @@ public void setTimes(String src, long mtime, long atime)
 
   /**
    * Create symlink to a file or directory.
-   * @param target The pathname of the destination that the
+   * @param target The path of the destination that the
    *               link points to.
-   * @param link The pathname of the link being created.
+   * @param link The path of the link being created.
    * @param dirPerm permissions to use when creating parent directories
    * @param createParent - if true then missing parent dirs are created
    *                       if false then parent must exist
@@ -851,14 +851,16 @@ public void createSymlink(String target, String link, FsPermission dirPerm,
       IOException;
 
   /**
-   * Resolve the first symbolic link on the specified path.
-   * @param path The pathname that needs to be resolved
-   *
-   * @return The pathname after resolving the first symbolic link if any.
-   *
+   * Return the target of the given symlink. If there is an intermediate
+   * symlink in the path (i.e. a symlink leading up to the final path
+   * component) then the given path is returned with this symlink resolved.
+   *
+   * @param path The path with a link that needs resolution.
+   * @return The path after resolving the first symbolic link in the path.
    * @throws AccessControlException permission denied
    * @throws FileNotFoundException If path does not exist
-   * @throws IOException If an I/O error occurred
+   * @throws IOException If the given path does not refer to a symlink
+   *           or an I/O error occurred
    */
   public String getLinkTarget(String path) throws AccessControlException,
       FileNotFoundException, IOException;
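Note (not part of the patch): a minimal client-side sketch of the getLinkTarget contract described in the javadoc above. The paths are hypothetical and error handling is omitted; it assumes the default FileContext is backed by a file system that supports symlinks, such as HDFS.

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class GetLinkTargetSketch {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext();
    // Create /dir1/link -> "dir2"; the relative target is stored as given.
    fc.createSymlink(new Path("dir2"), new Path("/dir1/link"), true);
    // Querying the link itself returns the target as it was created.
    System.out.println(fc.getLinkTarget(new Path("/dir1/link"))); // dir2
  }
}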
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java
index 9726ab70a4..03fb704934 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/UnresolvedPathException.java
@@ -32,10 +32,10 @@
 @InterfaceStability.Evolving
 public final class UnresolvedPathException extends UnresolvedLinkException {
   private static final long serialVersionUID = 1L;
-  private String originalPath;  // The original path containing the link
-  private String linkTarget;    // The target of the link
-  private String remainingPath; // The path part following the link
-
+  private String path;        // The path containing the link
+  private String preceding;   // The path part preceding the link
+  private String remainder;   // The path part following the link
+  private String linkTarget;  // The link's target
 
   /**
    * Used by RemoteException to instantiate an UnresolvedPathException.
@@ -44,22 +44,30 @@ public UnresolvedPathException(String msg) {
     super(msg);
   }
 
-  public UnresolvedPathException(String originalPath, String remainingPath,
-      String linkTarget) {
-    this.originalPath = originalPath;
-    this.remainingPath = remainingPath;
-    this.linkTarget = linkTarget;
+  public UnresolvedPathException(String path, String preceding,
+      String remainder, String linkTarget) {
+    this.path = path;
+    this.preceding = preceding;
+    this.remainder = remainder;
+    this.linkTarget = linkTarget;
   }
 
-  public Path getUnresolvedPath() throws IOException {
-    return new Path(originalPath);
-  }
-
+  /**
+   * Return a path with the link resolved to its target.
+   */
   public Path getResolvedPath() throws IOException {
-    if (remainingPath == null || "".equals(remainingPath)) {
-      return new Path(linkTarget);
+    // If the target is absolute we can throw out the preceding part and
+    // just append the remainder to the target, otherwise append each
+    // piece to resolve the link in path.
+    boolean noRemainder = (remainder == null || "".equals(remainder));
+    Path target = new Path(linkTarget);
+    if (target.isUriPathAbsolute()) {
+      return noRemainder ? target : new Path(target, remainder);
+    } else {
+      return noRemainder
+          ? new Path(preceding, target)
+          : new Path(new Path(preceding, linkTarget), remainder);
     }
-    return new Path(linkTarget, remainingPath);
   }
 
   @Override
@@ -68,7 +76,7 @@ public String getMessage() {
     if (msg != null) {
       return msg;
     }
-    String myMsg = "Unresolved path " + originalPath;
+    String myMsg = "Unresolved path " + path;
     try {
       return getResolvedPath().toString();
     } catch (IOException e) {
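Note (not part of the patch): a small standalone sketch that mirrors the resolution rules added to getResolvedPath() above, with a couple of concrete inputs. The class name and paths are made up for illustration.

import org.apache.hadoop.fs.Path;

public class ResolveSketch {
  // Same decision as getResolvedPath(): an absolute link target replaces the
  // preceding part of the path, a relative target is resolved against it.
  static Path resolve(String preceding, String linkTarget, String remainder) {
    boolean noRemainder = (remainder == null || "".equals(remainder));
    Path target = new Path(linkTarget);
    if (target.isUriPathAbsolute()) {
      return noRemainder ? target : new Path(target, remainder);
    }
    return noRemainder
        ? new Path(preceding, target)
        : new Path(new Path(preceding, linkTarget), remainder);
  }

  public static void main(String[] args) {
    // /dir1/link -> "dir2" (relative target), original path /dir1/link/file:
    System.out.println(resolve("/dir1", "dir2", "file"));   // /dir1/dir2/file
    // /dir1/link -> "/other" (absolute), the preceding part is dropped:
    System.out.println(resolve("/dir1", "/other", "file")); // /other/file
  }
}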
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index fdb2c32550..2d264131c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -2016,10 +2016,12 @@ void removePathAndBlocks(String src, List<Block> blocks) {
     }
   }
 
-  /** Get the file info for a specific file.
+  /**
+   * Get the file info for a specific file.
+   *
    * @param src The string representation of the path to the file
    * @param resolveLink whether to throw UnresolvedLinkException
-   *        if src refers to a symlinks
+   *        if src refers to a symlink
    *
    * @throws AccessControlException if access is denied
    * @throws UnresolvedLinkException if a symlink is encountered.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index 83d9858586..9885f23f92 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -372,14 +372,16 @@ static String[] getPathNames(String path) {
 
   /**
    * Given some components, create a path name.
-   * @param components
+   * @param components The path components
+   * @param start index
+   * @param end index
    * @return concatenated path
    */
-  static String constructPath(byte[][] components, int start) {
+  static String constructPath(byte[][] components, int start, int end) {
     StringBuilder buf = new StringBuilder();
-    for (int i = start; i < components.length; i++) {
+    for (int i = start; i < end; i++) {
       buf.append(DFSUtil.bytes2String(components[i]));
-      if (i < components.length - 1) {
+      if (i < end - 1) {
         buf.append(Path.SEPARATOR);
       }
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 1b771735ad..f4d9e78f88 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -189,18 +189,19 @@ assert compareBytes(this.name, components[0]) == 0 :
         existing[index] = curNode;
       }
       if (curNode.isLink() && (!lastComp || (lastComp && resolveLink))) {
-        if(NameNode.stateChangeLog.isDebugEnabled()) {
+        final String path = constructPath(components, 0, components.length);
+        final String preceding = constructPath(components, 0, count);
+        final String remainder =
+          constructPath(components, count + 1, components.length);
+        final String link = DFSUtil.bytes2String(components[count]);
+        final String target = ((INodeSymlink)curNode).getLinkValue();
+        if (NameNode.stateChangeLog.isDebugEnabled()) {
           NameNode.stateChangeLog.debug("UnresolvedPathException " +
-            " count: " + count +
-            " componenent: " + DFSUtil.bytes2String(components[count]) +
-            " full path: " + constructPath(components, 0) +
-            " remaining path: " + constructPath(components, count+1) +
-            " symlink: " + ((INodeSymlink)curNode).getLinkValue());
+            " path: " + path + " preceding: " + preceding +
+            " count: " + count + " link: " + link + " target: " + target +
+            " remainder: " + remainder);
         }
-        final String linkTarget = ((INodeSymlink)curNode).getLinkValue();
-        throw new UnresolvedPathException(constructPath(components, 0),
-                                          constructPath(components, count+1),
-                                          linkTarget);
+        throw new UnresolvedPathException(path, preceding, remainder, target);
       }
       if (lastComp || !curNode.isDirectory()) {
         break;
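Note (not part of the patch): a worked example of how the hunk above decomposes a lookup that hits an intermediate symlink. For a lookup of /dir1/link/file where the component "link" is a symlink with the relative target "dir2", the components array (including the empty root component) and the index count of the link give:

// components = { "", "dir1", "link", "file" }, count = 2
// path      = constructPath(components, 0, 4)  -> "/dir1/link/file"
// preceding = constructPath(components, 0, 2)  -> "/dir1"
// remainder = constructPath(components, 3, 4)  -> "file"
// target    = "dir2"
// UnresolvedPathException.getResolvedPath() then yields /dir1/dir2/file.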
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index aadc9968d2..37cd684116 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -766,10 +766,6 @@ public void createSymlink(String target, String link, FsPermission dirPerms,
   @Override // ClientProtocol
   public String getLinkTarget(String path) throws IOException {
     metrics.incrGetLinkTargetOps();
-    /* Resolves the first symlink in the given path, returning a
-     * new path consisting of the target of the symlink and any
-     * remaining path components from the original path.
-     */
     try {
       HdfsFileStatus stat = namesystem.getFileInfo(path, false);
       if (stat != null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
index 617b90026c..3f74789ae9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
@@ -20,6 +20,9 @@
 import java.io.*;
 import java.net.URI;
 
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.log4j.Level;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileContext;
@@ -28,9 +31,11 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
 import org.apache.hadoop.ipc.RemoteException;
+
 import static org.junit.Assert.*;
 import org.junit.Test;
 import org.junit.BeforeClass;
@@ -41,6 +46,10 @@
  */
 public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
 
+  {
+    ((Log4JLogger)NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
+  }
+
   private static MiniDFSCluster cluster;
 
   protected String getScheme() {
@@ -250,8 +259,8 @@ public void testLinkOwner() throws IOException {
     Path link = new Path(testBaseDir1(), "symlinkToFile");
     createAndWriteFile(file);
     fc.createSymlink(file, link, false);
-    FileStatus stat_file = fc.getFileStatus(file);
-    FileStatus stat_link = fc.getFileStatus(link);
-    assertEquals(stat_link.getOwner(), stat_file.getOwner());
+    FileStatus statFile = fc.getFileStatus(file);
+    FileStatus statLink = fc.getFileStatus(link);
+    assertEquals(statLink.getOwner(), statFile.getOwner());
   }
 }
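Note (not part of the patch): a sketch of the kind of regression test this change enables, in the style of the test file above. It reuses fc, testBaseDir1() and createAndWriteFile() from FileContextSymlinkBaseTest as used in the existing tests; the test name is hypothetical and any extra imports it would need (e.g. FsPermission) are omitted.

  @Test
  /** Resolve a file through an intermediate symlink with a relative target */
  public void testFileViaIntermediateRelativeLink() throws IOException {
    Path dir  = new Path(testBaseDir1(), "dir");
    Path file = new Path(dir, "file");
    Path link = new Path(testBaseDir1(), "linkToDir");
    fc.mkdir(dir, FsPermission.getDefault(), true);
    createAndWriteFile(file);
    // The link target is just "dir", i.e. relative to the link's parent.
    fc.createSymlink(new Path("dir"), link, false);
    // Resolving <testBaseDir1>/linkToDir/file hits the intermediate link;
    // with HDFS-2514 the preceding path is kept, so the relative target
    // resolves under testBaseDir1() and the file is found.
    assertNotNull(fc.getFileStatus(new Path(link, "file")));
  }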