HADOOP-11852. Disable symlinks in trunk.

This commit is contained in:
Andrew Wang 2015-04-23 11:47:01 -07:00
parent f5fe35e297
commit 26971e52ae
15 changed files with 67 additions and 2 deletions

View File

@ -201,6 +201,8 @@ Trunk (Unreleased)
HADOOP-11850. Typos in hadoop-common java docs. (Surendra Singh Lilhore
via jghoman)
HADOOP-11852. Disable symlinks in trunk.
BUG FIXES
HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

View File

@ -95,6 +95,10 @@ public T resolve(final FileContext fc, final Path path) throws IOException {
+ " and symlink resolution is disabled (" + " and symlink resolution is disabled ("
+ CommonConfigurationKeys.FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY + ").", e); + CommonConfigurationKeys.FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY + ").", e);
} }
if (!FileSystem.areSymlinksEnabled()) {
throw new IOException("Symlink resolution is disabled in"
+ " this version of Hadoop.");
}
if (count++ > FsConstants.MAX_PATH_LINKS) { if (count++ > FsConstants.MAX_PATH_LINKS) {
throw new IOException("Possible cyclic loop while " + throw new IOException("Possible cyclic loop while " +
"following symbolic link " + path); "following symbolic link " + path);

View File

@ -1431,11 +1431,15 @@ public FsStatus next(final AbstractFileSystem fs, final Path p)
* <code>target</code> or <code>link</code> is not supported * <code>target</code> or <code>link</code> is not supported
* @throws IOException If an I/O error occurred * @throws IOException If an I/O error occurred
*/ */
@SuppressWarnings("deprecation")
public void createSymlink(final Path target, final Path link, public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException, final boolean createParent) throws AccessControlException,
FileAlreadyExistsException, FileNotFoundException, FileAlreadyExistsException, FileNotFoundException,
ParentNotDirectoryException, UnsupportedFileSystemException, ParentNotDirectoryException, UnsupportedFileSystemException,
IOException { IOException {
if (!FileSystem.areSymlinksEnabled()) {
throw new UnsupportedOperationException("Symlinks not supported");
}
final Path nonRelLink = fixRelativePart(link); final Path nonRelLink = fixRelativePart(link);
new FSLinkResolver<Void>() { new FSLinkResolver<Void>() {
@Override @Override

View File

@ -3296,4 +3296,19 @@ void printStatistics() throws IOException {
": " + pair.getValue()); ": " + pair.getValue());
} }
} }
// Symlinks are temporarily disabled - see HADOOP-10020 and HADOOP-10052
private static boolean symlinksEnabled = false;
private static Configuration conf = null;
@VisibleForTesting
public static boolean areSymlinksEnabled() {
return symlinksEnabled;
}
@VisibleForTesting
public static void enableSymlinks() {
symlinksEnabled = true;
}
} }

View File

@ -87,6 +87,10 @@ public T resolve(final FileSystem filesys, final Path path)
+ CommonConfigurationKeys.FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY + CommonConfigurationKeys.FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY
+ ").", e); + ").", e);
} }
if (!FileSystem.areSymlinksEnabled()) {
throw new IOException("Symlink resolution is disabled in" +
" this version of Hadoop.");
}
if (count++ > FsConstants.MAX_PATH_LINKS) { if (count++ > FsConstants.MAX_PATH_LINKS) {
throw new IOException("Possible cyclic loop while " + throw new IOException("Possible cyclic loop while " +
"following symbolic link " + path); "following symbolic link " + path);

View File

@ -775,9 +775,13 @@ public boolean supportsSymlinks() {
return true; return true;
} }
@SuppressWarnings("deprecation")
@Override @Override
public void createSymlink(Path target, Path link, boolean createParent) public void createSymlink(Path target, Path link, boolean createParent)
throws IOException { throws IOException {
if (!FileSystem.areSymlinksEnabled()) {
throw new UnsupportedOperationException("Symlinks not supported");
}
final String targetScheme = target.toUri().getScheme(); final String targetScheme = target.toUri().getScheme();
if (targetScheme != null && !"file".equals(targetScheme)) { if (targetScheme != null && !"file".equals(targetScheme)) {
throw new IOException("Unable to create symlink to non-local file "+ throw new IOException("Unable to create symlink to non-local file "+

View File

@ -36,6 +36,10 @@
* Base test for symbolic links * Base test for symbolic links
*/ */
public abstract class SymlinkBaseTest { public abstract class SymlinkBaseTest {
// Re-enable symlinks for tests, see HADOOP-10020 and HADOOP-10052
static {
FileSystem.enableSymlinks();
}
static final long seed = 0xDEADBEEFL; static final long seed = 0xDEADBEEFL;
static final int blockSize = 8192; static final int blockSize = 8192;
static final int fileSize = 16384; static final int fileSize = 16384;

View File

@ -31,7 +31,9 @@
* Tests resolution of AbstractFileSystems for a given path with symlinks. * Tests resolution of AbstractFileSystems for a given path with symlinks.
*/ */
public class TestFileContextResolveAfs { public class TestFileContextResolveAfs {
static {
FileSystem.enableSymlinks();
}
private static String TEST_ROOT_DIR_LOCAL private static String TEST_ROOT_DIR_LOCAL
= System.getProperty("test.build.data","/tmp"); = System.getProperty("test.build.data","/tmp");

View File

@ -32,7 +32,9 @@
import org.junit.Test; import org.junit.Test;
public class TestStat extends FileSystemTestHelper { public class TestStat extends FileSystemTestHelper {
static {
FileSystem.enableSymlinks();
}
private static Stat stat; private static Stat stat;
@BeforeClass @BeforeClass

View File

@ -1286,12 +1286,16 @@ public FileStatus next(final FileSystem fs, final Path p)
}.resolve(this, absF); }.resolve(this, absF);
} }
@SuppressWarnings("deprecation")
@Override @Override
public void createSymlink(final Path target, final Path link, public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException, final boolean createParent) throws AccessControlException,
FileAlreadyExistsException, FileNotFoundException, FileAlreadyExistsException, FileNotFoundException,
ParentNotDirectoryException, UnsupportedFileSystemException, ParentNotDirectoryException, UnsupportedFileSystemException,
IOException { IOException {
if (!FileSystem.areSymlinksEnabled()) {
throw new UnsupportedOperationException("Symlinks not supported");
}
statistics.incrementWriteOps(1); statistics.incrementWriteOps(1);
final Path absF = fixRelativePart(link); final Path absF = fixRelativePart(link);
new FileSystemLinkResolver<Void>() { new FileSystemLinkResolver<Void>() {

View File

@ -33,6 +33,7 @@
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.hdfs.protocol.HdfsConstantsClient; import org.apache.hadoop.hdfs.protocol.HdfsConstantsClient;
import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@ -630,6 +631,9 @@ fsDir, renameReservedPathsOnUpgrade(timesOp.path, logVersion),
break; break;
} }
case OP_SYMLINK: { case OP_SYMLINK: {
if (!FileSystem.areSymlinksEnabled()) {
throw new IOException("Symlinks not supported - please remove symlink before upgrading to this version of HDFS");
}
SymlinkOp symlinkOp = (SymlinkOp)op; SymlinkOp symlinkOp = (SymlinkOp)op;
inodeId = getAndUpdateLastInodeId(symlinkOp.inodeId, logVersion, inodeId = getAndUpdateLastInodeId(symlinkOp.inodeId, logVersion,
lastInodeId); lastInodeId);

View File

@ -42,6 +42,7 @@
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIsNotDirectoryException; import org.apache.hadoop.fs.PathIsNotDirectoryException;
import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.UnresolvedLinkException;
@ -723,6 +724,7 @@ public INode loadINodeWithLocalName(boolean isSnapshotINode,
* @param counter Counter to increment for namenode startup progress * @param counter Counter to increment for namenode startup progress
* @return an inode * @return an inode
*/ */
@SuppressWarnings("deprecation")
INode loadINode(final byte[] localName, boolean isSnapshotINode, INode loadINode(final byte[] localName, boolean isSnapshotINode,
DataInput in, Counter counter) throws IOException { DataInput in, Counter counter) throws IOException {
final int imgVersion = getLayoutVersion(); final int imgVersion = getLayoutVersion();
@ -836,6 +838,9 @@ INode loadINode(final byte[] localName, boolean isSnapshotINode,
return dir; return dir;
} else if (numBlocks == -2) { } else if (numBlocks == -2) {
//symlink //symlink
if (!FileSystem.areSymlinksEnabled()) {
throw new IOException("Symlinks not supported - please remove symlink before upgrading to this version of HDFS");
}
final String symlink = Text.readString(in); final String symlink = Text.readString(in);
final PermissionStatus permissions = PermissionStatus.read(in); final PermissionStatus permissions = PermissionStatus.read(in);

View File

@ -147,6 +147,7 @@
import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.InvalidPathException;
import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Options;
@ -2131,6 +2132,9 @@ boolean shouldCopyOnTruncate(INodeFile file, BlockInfoContiguous blk) {
void createSymlink(String target, String link, void createSymlink(String target, String link,
PermissionStatus dirPerms, boolean createParent, boolean logRetryCache) PermissionStatus dirPerms, boolean createParent, boolean logRetryCache)
throws IOException { throws IOException {
if (!FileSystem.areSymlinksEnabled()) {
throw new UnsupportedOperationException("Symlinks not supported");
}
waitForLoadingFSImage(); waitForLoadingFSImage();
HdfsFileStatus auditStat = null; HdfsFileStatus auditStat = null;
checkOperation(OperationCategory.WRITE); checkOperation(OperationCategory.WRITE);

View File

@ -769,6 +769,9 @@ private void initMiniDFSCluster(
try { try {
ExitUtil.disableSystemExit(); ExitUtil.disableSystemExit();
// Re-enable symlinks for tests, see HADOOP-10020 and HADOOP-10052
FileSystem.enableSymlinks();
synchronized (MiniDFSCluster.class) { synchronized (MiniDFSCluster.class) {
instanceId = instanceCount++; instanceId = instanceCount++;
} }

View File

@ -71,6 +71,10 @@
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
public class TestINodeFile { public class TestINodeFile {
// Re-enable symlinks for tests, see HADOOP-10020 and HADOOP-10052
static {
FileSystem.enableSymlinks();
}
public static final Log LOG = LogFactory.getLog(TestINodeFile.class); public static final Log LOG = LogFactory.getLog(TestINodeFile.class);
static final short BLOCKBITS = 48; static final short BLOCKBITS = 48;