diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestRpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestRpcProgramNfs3.java
index 0fabb300db..f308763b24 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestRpcProgramNfs3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestRpcProgramNfs3.java
@@ -749,7 +749,7 @@ public void testCommit() throws Exception {
     assertEquals("Incorrect COMMIT3Response:", null, response2);
   }
 
-  @Test(timeout=1000)
+  @Test(timeout=10000)
   public void testIdempotent() {
     Object[][] procedures = {
         { Nfs3Constant.NFSPROC3.NULL, 1 },
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileContext.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileContext.java
index d94a5145a9..2a3e4c3637 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileContext.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileContext.java
@@ -34,7 +34,7 @@ public static void testSetup() throws Exception {
     wrapper = new FileContextTestWrapper(fc, "/tmp/TestSymlinkHdfsFileContext");
   }
 
-  @Test(timeout=1000)
+  @Test(timeout=10000)
   /** Test access a symlink using AbstractFileSystem */
   public void testAccessLinkFromAbstractFileSystem() throws IOException {
     Path file = new Path(testBaseDir1(), "file");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileSystem.java
index bf42e24b01..fba9c42858 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfsFileSystem.java
@@ -35,13 +35,13 @@ public static void testSetup() throws Exception {
 
   @Override
   @Ignore("FileSystem adds missing authority in absolute URIs")
-  @Test(timeout=1000)
+  @Test(timeout=10000)
   public void testCreateWithPartQualPathFails() throws IOException {}
 
   @Ignore("FileSystem#create creates parent directories,"
       + " so dangling links to directories are created")
   @Override
-  @Test(timeout=1000)
+  @Test(timeout=10000)
   public void testCreateFileViaDanglingLinkParent() throws IOException {}
 
   // Additional tests for DFS-only methods
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 14ad6dd8d1..f811d3d675 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -860,7 +860,7 @@ public void testGetSpnegoKeytabKey() {
         DFSUtil.getSpnegoKeytabKey(conf, defaultKey));
   }
 
-  @Test(timeout=1000)
+  @Test(timeout=10000)
   public void testDurationToString() throws Exception {
     assertEquals("000:00:00:00.000", DFSUtil.durationToString(0));
     assertEquals("001:01:01:01.000",
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index b9d25b2258..3974d0b859 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
@@ -157,7 +157,7 @@ private void readFile(FileSystem fileSys,Path name) throws IOException {
    * Test that capacity metrics are exported and pass
    * basic sanity tests.
    */
-  @Test (timeout = 1800)
+  @Test (timeout = 10000)
   public void testCapacityMetrics() throws Exception {
     MetricsRecordBuilder rb = getMetrics(NS_METRICS);
     long capacityTotal = MetricsAsserts.getLongGauge("CapacityTotal", rb);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
index 24de8ac911..1862f76a0c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
@@ -130,7 +130,7 @@ private void checkNoTokenForOperation(HttpOpParam.Op op) throws IOException {
     verify(fs, never()).setDelegationToken(any());
   }
 
-  @Test(timeout = 1000)
+  @Test(timeout = 10000)
   public void testGetOpRequireAuth() {
     for (HttpOpParam.Op op : GetOpParam.Op.values()) {
       boolean expect = (op == GetOpParam.Op.GETDELEGATIONTOKEN);
@@ -138,7 +138,7 @@ public void testGetOpRequireAuth() {
     }
   }
 
-  @Test(timeout = 1000)
+  @Test(timeout = 10000)
   public void testPutOpRequireAuth() {
     for (HttpOpParam.Op op : PutOpParam.Op.values()) {
       boolean expect = (op == PutOpParam.Op.RENEWDELEGATIONTOKEN || op == PutOpParam.Op.CANCELDELEGATIONTOKEN);
@@ -146,14 +146,14 @@ public void testPutOpRequireAuth() {
     }
   }
 
-  @Test(timeout = 1000)
+  @Test(timeout = 10000)
   public void testPostOpRequireAuth() {
     for (HttpOpParam.Op op : PostOpParam.Op.values()) {
       assertFalse(op.getRequireAuth());
     }
   }
 
-  @Test(timeout = 1000)
+  @Test(timeout = 10000)
   public void testDeleteOpRequireAuth() {
     for (HttpOpParam.Op op : DeleteOpParam.Op.values()) {
       assertFalse(op.getRequireAuth());