diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
index feefcef525..2b2d746af9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
@@ -232,7 +232,7 @@ public class Server {
    * path.
    */
   private String checkAbsolutePath(String value, String name) {
-    if (!value.startsWith("/")) {
+    if (!new File(value).isAbsolute()) {
       throw new IllegalArgumentException(
         MessageFormat.format("[{0}] must be an absolute path [{1}]", name, value));
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 3d96fd8326..2ec1fcb933 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -364,7 +364,7 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
     }
   }
 
-  private void testSetPermission() throws Exception {
+  protected void testSetPermission() throws Exception {
     FileSystem fs = FileSystem.get(getProxiedFSConf());
     Path path = new Path(getProxiedFSTestDir(), "foodir");
     fs.mkdirs(path);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java
index 0cb0cc64b3..e9a80e3465 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java
@@ -20,8 +20,13 @@ package org.apache.hadoop.fs.http.client;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.TestDirHelper;
+import org.junit.Assert;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
@@ -65,21 +70,31 @@ public class TestHttpFSFileSystemLocalFileSystem extends BaseTestHttpFSWith {
   }
 
   protected Path addPrefix(Path path) {
-    URI uri = path.toUri();
-    try {
-      if (uri.getAuthority() != null) {
-        uri = new URI(uri.getScheme(),
-                      uri.getAuthority(), PATH_PREFIX + uri.getPath());
-      }
-      else {
-        if (uri.getPath().startsWith("/")) {
-          uri = new URI(PATH_PREFIX + uri.getPath());
-        }
-      }
-    } catch (URISyntaxException ex) {
-      throw new RuntimeException("It should not happen: " + ex.toString(), ex);
-    }
-    return new Path(uri);
+    return Path.mergePaths(new Path(PATH_PREFIX), path);
   }
 
+  @Override
+  protected void testSetPermission() throws Exception {
+    if (Path.WINDOWS) {
+      FileSystem fs = FileSystem.get(getProxiedFSConf());
+      Path path = new Path(getProxiedFSTestDir(), "foodir");
+      fs.mkdirs(path);
+
+      fs = getHttpFSFileSystem();
+      FsPermission permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
+      fs.setPermission(path, permission1);
+      fs.close();
+
+      fs = FileSystem.get(getProxiedFSConf());
+      FileStatus status1 = fs.getFileStatus(path);
+      fs.close();
+      FsPermission permission2 = status1.getPermission();
+      Assert.assertEquals(permission2, permission1);
+
+      // sticky bit not supported on Windows with local file system, so the
+      // subclass skips that part of the test
+    } else {
+      super.testSetPermission();
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
index 2e28441d71..a6a139f23e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/server/TestServer.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.TestDir;
 import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.test.TestException;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
 
@@ -50,21 +51,24 @@ public class TestServer extends HTestCase {
   @Test
   @TestDir
   public void constructorsGetters() throws Exception {
-    Server server = new Server("server", "/a", "/b", "/c", "/d", new Configuration(false));
-    assertEquals(server.getHomeDir(), "/a");
-    assertEquals(server.getConfigDir(), "/b");
-    assertEquals(server.getLogDir(), "/c");
-    assertEquals(server.getTempDir(), "/d");
+    Server server = new Server("server", getAbsolutePath("/a"),
+        getAbsolutePath("/b"), getAbsolutePath("/c"), getAbsolutePath("/d"),
+        new Configuration(false));
+    assertEquals(server.getHomeDir(), getAbsolutePath("/a"));
+    assertEquals(server.getConfigDir(), getAbsolutePath("/b"));
+    assertEquals(server.getLogDir(), getAbsolutePath("/c"));
+    assertEquals(server.getTempDir(), getAbsolutePath("/d"));
     assertEquals(server.getName(), "server");
     assertEquals(server.getPrefix(), "server");
     assertEquals(server.getPrefixedName("name"), "server.name");
     assertNotNull(server.getConfig());
 
-    server = new Server("server", "/a", "/b", "/c", "/d");
-    assertEquals(server.getHomeDir(), "/a");
-    assertEquals(server.getConfigDir(), "/b");
-    assertEquals(server.getLogDir(), "/c");
-    assertEquals(server.getTempDir(), "/d");
+    server = new Server("server", getAbsolutePath("/a"), getAbsolutePath("/b"),
+        getAbsolutePath("/c"), getAbsolutePath("/d"));
+    assertEquals(server.getHomeDir(), getAbsolutePath("/a"));
+    assertEquals(server.getConfigDir(), getAbsolutePath("/b"));
+    assertEquals(server.getLogDir(), getAbsolutePath("/c"));
+    assertEquals(server.getTempDir(), getAbsolutePath("/d"));
     assertEquals(server.getName(), "server");
     assertEquals(server.getPrefix(), "server");
     assertEquals(server.getPrefixedName("name"), "server.name");
@@ -793,4 +797,14 @@ public class TestServer extends HTestCase {
     server.destroy();
   }
 
+  /**
+   * Creates an absolute path by appending the given relative path to the test
+   * root.
+   *
+   * @param relativePath String relative path
+   * @return String absolute path formed by appending relative path to test root
+   */
+  private static String getAbsolutePath(String relativePath) {
+    return new File(TestDirHelper.getTestDir(), relativePath).getAbsolutePath();
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
index 3148d3a682..203796ead8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/servlet/TestHostnameFilter.java
@@ -50,7 +50,10 @@ public class TestHostnameFilter extends HTestCase {
       @Override
      public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
        throws IOException, ServletException {
-        assertTrue(HostnameFilter.get().contains("localhost"));
+        // Hostname was set to "localhost", but may get resolved automatically to
+        // "127.0.0.1" depending on OS.
+        assertTrue(HostnameFilter.get().contains("localhost") ||
+          HostnameFilter.get().contains("127.0.0.1"));
         invoked.set(true);
       }
     };
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
index 4d11691c67..d20658fe4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestDirHelper.java
@@ -63,7 +63,7 @@ public class TestDirHelper implements MethodRule {
   static {
     try {
       TEST_DIR_ROOT = System.getProperty(TEST_DIR_PROP, new File("target").getAbsolutePath());
-      if (!TEST_DIR_ROOT.startsWith("/")) {
+      if (!new File(TEST_DIR_ROOT).isAbsolute()) {
        System.err.println(MessageFormat.format("System property [{0}]=[{1}] must be set to an absolute path",
                                                TEST_DIR_PROP, TEST_DIR_ROOT));
         System.exit(-1);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
index 26d253fecb..8e1fc2f239 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
@@ -82,7 +82,8 @@ public class TestHdfsHelper extends TestDirHelper {
 
   private Path resetHdfsTestDir(Configuration conf) {
 
-    Path testDir = new Path("./" + TEST_DIR_ROOT, testName + "-" + counter.getAndIncrement());
+    Path testDir = new Path("/tmp/" + testName + "-" +
+        counter.getAndIncrement());
     try {
       // currentUser
       FileSystem fs = FileSystem.get(conf);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0c5551dfdb..3760157078 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -327,6 +327,8 @@ Trunk (Unreleased)
 
     HDFS-4572. Fix TestJournal failures on Windows. (Arpit Agarwal via suresh)
 
+    HDFS-4287. HTTPFS tests fail on Windows. (Chris Nauroth via suresh)
+
 Release 2.0.5-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES