diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java index b775d8bd98..1bb8b38ab0 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java @@ -51,8 +51,11 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.Path; import org.apache.hadoop.metrics2.source.JvmMetrics; +import org.apache.hadoop.net.DNSToSwitchMapping; +import org.apache.hadoop.net.TableMapping; import org.apache.hadoop.tools.rumen.JobTraceReader; import org.apache.hadoop.tools.rumen.LoggedJob; import org.apache.hadoop.tools.rumen.LoggedTask; @@ -126,6 +129,7 @@ public class SLSRunner extends Configured implements Tool { // other simulation information private int numNMs, numRacks, numAMs, numTasks; private long maxRuntime; + private String tableMapping; private final static Map simulateInfoMap = new HashMap(); @@ -231,7 +235,7 @@ public void setSimulationParams(TraceType inType, String[] inTraces, this.trackedApps = trackApps; this.printSimulation = printsimulation; metricsOutputDir = outDir; - + tableMapping = outDir + "/tableMapping.csv"; } public void start() throws IOException, ClassNotFoundException, YarnException, @@ -272,7 +276,12 @@ private void startRM() throws ClassNotFoundException, YarnException { // TODO add support for FifoScheduler throw new YarnException("Fifo Scheduler is not supported yet."); } - + rmConf.setClass( + CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, + TableMapping.class, DNSToSwitchMapping.class); + rmConf.set( + CommonConfigurationKeysPublic.NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, + tableMapping); 
rmConf.set(SLSConfiguration.METRICS_OUTPUT_DIR, metricsOutputDir); final SLSRunner se = this; @@ -332,6 +341,8 @@ private void startNM() throws YarnException, IOException, throw new YarnException("No node! Please configure nodes."); } + SLSUtils.generateNodeTableMapping(nodeSet, tableMapping); + // create NM simulators Random random = new Random(); Set<String> rackSet = new ConcurrentHashSet<>(); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java index 8bb4871e5b..256dcf4629 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java @@ -23,6 +23,11 @@ import java.io.InputStreamReader; import java.io.Reader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -219,4 +224,32 @@ public static Set<NodeDetails> generateNodes(int numNodes, } return nodeSet; } + + /** + * Generates a node to rack mapping file based on node details. + * This file is then used by TableMapping to resolve rack names. + * The format required by TableMapping is a two-column text file + * where the first column specifies the node name + * and the second column specifies the rack name. + * @param nodeDetails Set of node details. + * @param filePath File path where to write the table mapping. 
+ * @throws IOException if an I/O error occurs while writing the mapping file + */ + public static void generateNodeTableMapping(Set<NodeDetails> nodeDetails, + String filePath) throws IOException { + List<String> entries = new ArrayList<>(); + for (NodeDetails nodeDetail : nodeDetails) { + if (nodeDetail.getHostname().contains("/")) { + String hostname = nodeDetail.getHostname(); + int lIndex = hostname.lastIndexOf("/"); + String node = hostname.substring(lIndex + 1); + String rack = hostname.substring(0, lIndex); + entries.add(node + " " + rack); + } + } + Files.write(Paths.get(filePath), + entries, + StandardCharsets.UTF_8, + StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); + } } diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java index c59c2af81b..74907202b1 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java @@ -24,7 +24,11 @@ import org.junit.Assert; import org.junit.Test; +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.HashSet; +import java.util.List; import java.util.Set; public class TestSLSUtils { @@ -109,6 +113,26 @@ public void testGenerateNodes() { Assert.assertEquals("Number of racks is wrong.", 1, getNumRack(nodes)); } + /** + * Tests creation of table mapping based on given node details. 
+ * @throws Exception if node generation or table mapping creation fails + */ + @Test + public void testGenerateNodeTableMapping() throws Exception { + Set<NodeDetails> nodes = SLSUtils.generateNodes(3, 3); + File tempFile = File.createTempFile("testslsutils", ".tmp"); + tempFile.deleteOnExit(); + String fileName = tempFile.getAbsolutePath(); + SLSUtils.generateNodeTableMapping(nodes, fileName); + + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Assert.assertEquals(3, lines.size()); + for (String line : lines) { + Assert.assertTrue(line.contains("node")); + Assert.assertTrue(line.contains("/rack")); + } + } + private int getNumRack(Set<NodeDetails> nodes) { Set<String> racks = new HashSet<>(); for (NodeDetails node : nodes) {