HDFS-6348. SecondaryNameNode not terminating properly on runtime exceptions (Contributed by Rakesh R)
parent 0790275f05
commit 93972a332a
@@ -1,4 +1,4 @@
 Hadoop HDFS Change Log
 
 Trunk (Unreleased)
 
@@ -788,6 +788,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8403. Eliminate retries in TestFileCreation
     #testOverwriteOpenForWrite. (Arpit Agarwal via wheat9)
 
+    HDFS-6348. SecondaryNameNode not terminating properly on runtime exceptions
+    (Rakesh R via vinayakumarb)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -667,29 +667,28 @@ public class SecondaryNameNode implements Runnable,
       opts.usage();
       System.exit(0);
     }
 
-    StringUtils.startupShutdownMessage(SecondaryNameNode.class, argv, LOG);
-    Configuration tconf = new HdfsConfiguration();
-    SecondaryNameNode secondary = null;
     try {
+      StringUtils.startupShutdownMessage(SecondaryNameNode.class, argv, LOG);
+      Configuration tconf = new HdfsConfiguration();
+      SecondaryNameNode secondary = null;
       secondary = new SecondaryNameNode(tconf, opts);
-    } catch (IOException ioe) {
-      LOG.fatal("Failed to start secondary namenode", ioe);
+      if (opts != null && opts.getCommand() != null) {
+        int ret = secondary.processStartupCommand(opts);
+        terminate(ret);
+      }
+
+      if (secondary != null) {
+        secondary.startCheckpointThread();
+        secondary.join();
+      }
+    } catch (Throwable e) {
+      LOG.fatal("Failed to start secondary namenode", e);
       terminate(1);
     }
-
-    if (opts != null && opts.getCommand() != null) {
-      int ret = secondary.processStartupCommand(opts);
-      terminate(ret);
-    }
-
-    if (secondary != null) {
-      secondary.startCheckpointThread();
-      secondary.join();
-    }
   }
 
 
   public void startCheckpointThread() {
     Preconditions.checkState(checkpointThread == null,
         "Should not already have a thread");
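The restructured main() above is the substance of the fix: previously only IOException from the SecondaryNameNode constructor was caught, so a RuntimeException thrown during startup escaped main() without ever reaching terminate(), and the SecondaryNameNode did not terminate properly. Wrapping the whole startup sequence in one try block and catching Throwable routes every failure through terminate(), which is backed by ExitUtil. A minimal standalone sketch of that pattern follows; only org.apache.hadoop.util.ExitUtil is the real Hadoop API, while the class name and riskyStartup() are hypothetical stand-ins:

    import org.apache.hadoop.util.ExitUtil;

    public class StartupExitSketch {
      public static void main(String[] args) {
        try {
          riskyStartup();
        } catch (Throwable t) {
          // Any failure, checked or unchecked, lands here and produces an exit
          // code. Under tests that called ExitUtil.disableSystemExit(),
          // terminate() throws ExitUtil.ExitException instead of exiting.
          System.err.println("Failed to start: " + t);
          ExitUtil.terminate(1);
        }
      }

      // Hypothetical stand-in for the real startup work (constructor, commands).
      private static void riskyStartup() {
        throw new IllegalStateException("simulated runtime failure");
      }
    }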
@@ -60,6 +60,8 @@ import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
+import org.apache.hadoop.util.ExitUtil.ExitException;
+import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.Logger;
 import org.junit.After;
@@ -87,6 +89,8 @@ public class TestStartup {
 
   @Before
   public void setUp() throws Exception {
+    ExitUtil.disableSystemExit();
+    ExitUtil.resetFirstExitException();
     config = new HdfsConfiguration();
     hdfsDir = new File(MiniDFSCluster.getBaseDirectory());
 
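The two ExitUtil calls added to setUp() are what make the exit path testable: disableSystemExit() makes ExitUtil.terminate() throw ExitUtil.ExitException instead of calling System.exit(), and resetFirstExitException() discards the recorded exit between test cases so terminateCalled() reflects only the current test. A small illustrative sketch of that harness pattern, with a hypothetical class name around the real hadoop-common ExitUtil calls:

    import org.apache.hadoop.util.ExitUtil;
    import org.apache.hadoop.util.ExitUtil.ExitException;

    public class ExitUtilHarnessSketch {
      public static void main(String[] args) {
        ExitUtil.disableSystemExit();        // terminate() now throws ExitException
        ExitUtil.resetFirstExitException();  // forget any earlier recorded exit
        try {
          ExitUtil.terminate(1, "simulated startup failure");
        } catch (ExitException ee) {
          // The exit is observable without the JVM actually going away.
          System.out.println("terminateCalled=" + ExitUtil.terminateCalled()
              + ", message=" + ee.getMessage());
        }
      }
    }

The new test in the next hunk relies on exactly this: it invokes SecondaryNameNode.main() in an environment where startup must fail and asserts that the failure surfaces as an ExitException with terminateCalled() set, rather than being swallowed.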
@@ -403,7 +407,19 @@ public class TestStartup {
       cluster.shutdown();
     }
   }
 
+  @Test(timeout = 30000)
+  public void testSNNStartupWithRuntimeException() throws Exception {
+    String[] argv = new String[] { "-checkpoint" };
+    try {
+      SecondaryNameNode.main(argv);
+      fail("Failed to handle runtime exceptions during SNN startup!");
+    } catch (ExitException ee) {
+      GenericTestUtils.assertExceptionContains("ExitException", ee);
+      assertTrue("Didn't terminate properly", ExitUtil.terminateCalled());
+    }
+  }
+
   @Test
   public void testCompression() throws IOException {
     LOG.info("Test compressing image.");