HADOOP-8077. HA: fencing method should be able to be configured on a per-NN or per-NS basis. Contributed by Todd Lipcon.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1310173 13f79535-47bb-0310-9956-ffa450edef68
commit d483b6f3fc (parent 04cc1d614d)
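
This change removes the hard-coded fencing key from NodeFencer and has callers pass the configuration key instead. On the HDFS side the key is dfs.ha.fencing.methods (DFS_HA_FENCE_METHODS_KEY), and the new testFencingConfigPerNameNode test below exercises per-nameservice and per-NameNode suffixes of that key. A minimal configuration sketch, mirroring the keys used in that test ("ns1" and "nn1" are example IDs, not values taken from this commit):

    // Sketch only: key names follow the pattern exercised by testFencingConfigPerNameNode.
    // "ns1" and "nn1" are example nameservice/NameNode IDs.
    Configuration conf = new HdfsConfiguration();
    conf.set("dfs.ha.fencing.methods", "shell(true)");          // cluster-wide default
    conf.set("dfs.ha.fencing.methods.ns1", "shell(true)");      // per-nameservice override
    conf.set("dfs.ha.fencing.methods.ns1.nn1", "shell(true)");  // per-NameNode override
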
@@ -252,6 +252,9 @@ Release 2.0.0 - UNRELEASED
 
     HADOOP-8007. Use substitution tokens for fencing argument (todd)
 
+    HADOOP-8077. HA: fencing method should be able to be configured on
+    a per-NN or per-NS basis (todd)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -53,9 +53,6 @@
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class NodeFencer {
-  public static final String CONF_METHODS_KEY =
-      "dfs.ha.fencing.methods";
-
   private static final String CLASS_RE = "([a-zA-Z0-9\\.\\$]+)";
   private static final Pattern CLASS_WITH_ARGUMENT =
       Pattern.compile(CLASS_RE + "\\((.+?)\\)");
@@ -76,18 +73,18 @@ public class NodeFencer {
 
   private final List<FenceMethodWithArg> methods;
 
-  public NodeFencer(Configuration conf)
+  NodeFencer(Configuration conf, String spec)
       throws BadFencingConfigurationException {
-    this.methods = parseMethods(conf);
+    this.methods = parseMethods(conf, spec);
   }
 
-  public static NodeFencer create(Configuration conf)
+  public static NodeFencer create(Configuration conf, String confKey)
       throws BadFencingConfigurationException {
-    String confStr = conf.get(CONF_METHODS_KEY);
+    String confStr = conf.get(confKey);
     if (confStr == null) {
       return null;
     }
-    return new NodeFencer(conf);
+    return new NodeFencer(conf, confStr);
   }
 
   public boolean fence(HAServiceTarget fromSvc) {
@@ -115,10 +112,10 @@ public boolean fence(HAServiceTarget fromSvc) {
     return false;
   }
 
-  private static List<FenceMethodWithArg> parseMethods(Configuration conf)
+  private static List<FenceMethodWithArg> parseMethods(Configuration conf,
+      String spec)
       throws BadFencingConfigurationException {
-    String confStr = conf.get(CONF_METHODS_KEY);
-    String[] lines = confStr.split("\\s*\n\\s*");
+    String[] lines = spec.split("\\s*\n\\s*");
 
     List<FenceMethodWithArg> methods = Lists.newArrayList();
     for (String line : lines) {
@@ -132,8 +132,7 @@ public static NodeFencer setupFencer(String confStr)
       throws BadFencingConfigurationException {
     System.err.println("Testing configuration:\n" + confStr);
     Configuration conf = new Configuration();
-    conf.set(NodeFencer.CONF_METHODS_KEY, confStr);
-    return new NodeFencer(conf);
+    return new NodeFencer(conf, confStr);
   }
 
   /**
@@ -71,8 +71,7 @@ public void testBasicSuccessFailure() {
   public void testCheckNoArgs() {
     try {
       Configuration conf = new Configuration();
-      conf.set(NodeFencer.CONF_METHODS_KEY, "shell");
-      new NodeFencer(conf);
+      new NodeFencer(conf, "shell");
       fail("Didn't throw when passing no args to shell");
     } catch (BadFencingConfigurationException confe) {
       assertTrue(
@@ -85,8 +84,7 @@ public void testCheckNoArgs() {
   public void testCheckParensNoArgs() {
     try {
       Configuration conf = new Configuration();
-      conf.set(NodeFencer.CONF_METHODS_KEY, "shell()");
-      new NodeFencer(conf);
+      new NodeFencer(conf, "shell()");
      fail("Didn't throw when passing no args to shell");
     } catch (BadFencingConfigurationException confe) {
       assertTrue(
@@ -345,4 +345,5 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int DFS_HA_LOGROLL_PERIOD_DEFAULT = 2 * 60; // 2m
   public static final String DFS_HA_TAILEDITS_PERIOD_KEY = "dfs.ha.tail-edits.period";
   public static final int DFS_HA_TAILEDITS_PERIOD_DEFAULT = 60; // 1m
+  public static final String DFS_HA_FENCE_METHODS_KEY = "dfs.ha.fencing.methods";
 }
@@ -40,6 +40,8 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Trash;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -160,7 +162,8 @@ public static enum OperationCategory {
     DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
     DFS_NAMENODE_BACKUP_ADDRESS_KEY,
     DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
-    DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY
+    DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY,
+    DFS_HA_FENCE_METHODS_KEY
   };
 
   public long getProtocolVersion(String protocol,
|
@ -24,6 +24,7 @@
|
|||||||
import org.apache.hadoop.ha.BadFencingConfigurationException;
|
import org.apache.hadoop.ha.BadFencingConfigurationException;
|
||||||
import org.apache.hadoop.ha.HAServiceTarget;
|
import org.apache.hadoop.ha.HAServiceTarget;
|
||||||
import org.apache.hadoop.ha.NodeFencer;
|
import org.apache.hadoop.ha.NodeFencer;
|
||||||
|
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||||
import org.apache.hadoop.hdfs.DFSUtil;
|
import org.apache.hadoop.hdfs.DFSUtil;
|
||||||
import org.apache.hadoop.hdfs.HdfsConfiguration;
|
import org.apache.hadoop.hdfs.HdfsConfiguration;
|
||||||
import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
||||||
@ -75,7 +76,8 @@ public NNHAServiceTarget(HdfsConfiguration conf,
|
|||||||
this.addr = NetUtils.createSocketAddr(serviceAddr,
|
this.addr = NetUtils.createSocketAddr(serviceAddr,
|
||||||
NameNode.DEFAULT_PORT);
|
NameNode.DEFAULT_PORT);
|
||||||
try {
|
try {
|
||||||
this.fencer = NodeFencer.create(targetConf);
|
this.fencer = NodeFencer.create(targetConf,
|
||||||
|
DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY);
|
||||||
} catch (BadFencingConfigurationException e) {
|
} catch (BadFencingConfigurationException e) {
|
||||||
this.fenceConfigError = e;
|
this.fenceConfigError = e;
|
||||||
}
|
}
|
||||||
|
@@ -158,7 +158,7 @@ public void testFailoverWithNoFencerConfigured() throws Exception {
   public void testFailoverWithFencerConfigured() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-failover", "nn1", "nn2"));
   }
@@ -167,7 +167,7 @@ public void testFailoverWithFencerConfigured() throws Exception {
   public void testFailoverWithFencerAndNameservice() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-ns", "ns1", "-failover", "nn1", "nn2"));
   }
@@ -176,7 +176,7 @@ public void testFailoverWithFencerAndNameservice() throws Exception {
   public void testFailoverWithFencerConfiguredAndForce() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
   }
@@ -185,7 +185,7 @@ public void testFailoverWithFencerConfiguredAndForce() throws Exception {
   public void testFailoverWithForceActive() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-failover", "nn1", "nn2", "--forceactive"));
   }
@@ -194,7 +194,7 @@ public void testFailoverWithForceActive() throws Exception {
   public void testFailoverWithInvalidFenceArg() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(-1, runTool("-failover", "nn1", "nn2", "notforcefence"));
   }
@@ -209,7 +209,7 @@ public void testFailoverWithFenceButNoFencer() throws Exception {
   public void testFailoverWithFenceAndBadFencer() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "foobar!");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "foobar!");
     tool.setConf(conf);
     assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence"));
   }
@@ -218,7 +218,7 @@ public void testFailoverWithFenceAndBadFencer() throws Exception {
   public void testForceFenceOptionListedBeforeArgs() throws Exception {
     Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     HdfsConfiguration conf = getHAConf();
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-failover", "--forcefence", "nn1", "nn2"));
   }
@@ -240,7 +240,41 @@ public void testCheckHealth() throws Exception {
     assertEquals(-1, runTool("-checkHealth", "nn1"));
     assertOutputContains("Health check failed: fake health check failure");
   }
 
+  /**
+   * Test that the fencing configuration can be overridden per-nameservice
+   * or per-namenode
+   */
+  @Test
+  public void testFencingConfigPerNameNode() throws Exception {
+    Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
+
+    final String nsSpecificKey = DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY + "." + NSID;
+    final String nnSpecificKey = nsSpecificKey + ".nn1";
+
+    HdfsConfiguration conf = getHAConf();
+    // Set the default fencer to succeed
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
+    tool.setConf(conf);
+    assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
+
+    // Set the NN-specific fencer to fail. Should fail to fence.
+    conf.set(nnSpecificKey, "shell(false)");
+    tool.setConf(conf);
+    assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence"));
+    conf.unset(nnSpecificKey);
+
+    // Set an NS-specific fencer to fail. Should fail.
+    conf.set(nsSpecificKey, "shell(false)");
+    tool.setConf(conf);
+    assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence"));
+
+    // Set the NS-specific fencer to succeed. Should succeed
+    conf.set(nsSpecificKey, "shell(true)");
+    tool.setConf(conf);
+    assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
+  }
+
   private Object runTool(String ... args) throws Exception {
     errOutBytes.reset();
     LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
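
The test added above asserts the lookup precedence: a NameNode-specific key (dfs.ha.fencing.methods.<nsId>.<nnId>) overrides a nameservice-specific key (dfs.ha.fencing.methods.<nsId>), which overrides the base key. The admin-side resolution code is not part of this excerpt; the helper below is only a hypothetical illustration of that precedence (its name and placement are assumptions, not code from this commit):

    // Hypothetical illustration only: choose the most specific fencing key,
    // matching the precedence asserted by testFencingConfigPerNameNode.
    static String chooseFencingConfKey(Configuration conf, String nsId, String nnId) {
      String base = DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY;  // "dfs.ha.fencing.methods"
      String nnKey = base + "." + nsId + "." + nnId;
      String nsKey = base + "." + nsId;
      if (conf.get(nnKey) != null) {
        return nnKey;
      }
      if (conf.get(nsKey) != null) {
        return nsKey;
      }
      return base;
    }
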
@@ -28,6 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
@@ -113,7 +114,7 @@ public void testStateTransition() throws Exception {
 
   @Test
   public void testTryFailoverToSafeMode() throws Exception {
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
 
     NameNodeAdapter.enterSafeMode(cluster.getNameNode(0), false);
@@ -135,7 +136,7 @@ public void testFencer() throws Exception {
     // tmp file, so we can verify that the args were substituted right
     File tmpFile = File.createTempFile("testFencer", ".txt");
     tmpFile.deleteOnExit();
-    conf.set(NodeFencer.CONF_METHODS_KEY,
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY,
         "shell(echo -n $target_nameserviceid.$target_namenodeid " +
         "$target_port $dfs_ha_namenode_id > " +
         tmpFile.getAbsolutePath() + ")");
@@ -168,19 +169,19 @@ public void testFencer() throws Exception {
 
 
     // Test failover with not fencer and forcefence option
-    conf.unset(NodeFencer.CONF_METHODS_KEY);
+    conf.unset(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY);
     tool.setConf(conf);
     assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence"));
     assertFalse(tmpFile.exists());
 
     // Test failover with bad fencer and forcefence option
-    conf.set(NodeFencer.CONF_METHODS_KEY, "foobar!");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "foobar!");
     tool.setConf(conf);
     assertEquals(-1, runTool("-failover", "nn1", "nn2", "--forcefence"));
     assertFalse(tmpFile.exists());
 
     // Test failover with force fence listed before the other arguments
-    conf.set(NodeFencer.CONF_METHODS_KEY, "shell(true)");
+    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(true)");
     tool.setConf(conf);
     assertEquals(0, runTool("-failover", "--forcefence", "nn1", "nn2"));
   }