diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
index 8f6dd4b111..45c45b146f 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
@@ -2451,8 +2451,6 @@ public boolean mkdirs(Path f, FsPermission permission, boolean noUmask) throws I
 
 
     ArrayList<String> keysToCreateAsFolder = new ArrayList<String>();
-    ArrayList<String> keysToUpdateAsFolder = new ArrayList<String>();
-    boolean childCreated = false;
     // Check that there is no file in the parent chain of the given path.
     for (Path current = absolutePath, parent = current.getParent();
         parent != null; // Stop when you get to the root
@@ -2464,14 +2462,6 @@ public boolean mkdirs(Path f, FsPermission permission, boolean noUmask) throws I
             + current + " is an existing file.");
       } else if (currentMetadata == null) {
         keysToCreateAsFolder.add(currentKey);
-        childCreated = true;
-      } else {
-        // The directory already exists. Its last modified time need to be
-        // updated if there is a child directory created under it.
-        if (childCreated) {
-          keysToUpdateAsFolder.add(currentKey);
-        }
-        childCreated = false;
       }
     }
 
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java
index a9d404d508..6cdcc4e4b1 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/InputStriper.java
@@ -131,9 +131,7 @@ private long[] toLongArray(final ArrayList<Long> sigh) {
   static final Comparator<Entry<String,Double>> hostRank =
     new Comparator<Entry<String,Double>>() {
       public int compare(Entry<String,Double> a, Entry<String,Double> b) {
-        final double va = a.getValue();
-        final double vb = b.getValue();
-        return va > vb ? -1 : va < vb ? 1 : 0;
-      }
+        return Double.compare(b.getValue(), a.getValue());
+      }
   };
 }
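Aside on the InputStriper hunk above: the removed comparator ranks hosts with the largest value first (va > vb returns -1), so the Double.compare replacement has to keep that descending order by swapping its arguments. A minimal, self-contained sketch of the idiom, using hypothetical host names and byte counts that are not taken from the patch:

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;
import java.util.Map.Entry;

public class HostRankSketch {
  public static void main(String[] args) {
    // Hypothetical per-host byte counts, for illustration only.
    List<Entry<String, Double>> hosts = Arrays.asList(
        new SimpleEntry<String, Double>("hostA", 10.0),
        new SimpleEntry<String, Double>("hostB", 30.0),
        new SimpleEntry<String, Double>("hostC", 20.0));
    // Double.compare(b, a) yields descending order, matching the original
    // "va > vb ? -1 : va < vb ? 1 : 0" comparator that puts the busiest
    // host first.
    hosts.sort((a, b) -> Double.compare(b.getValue(), a.getValue()));
    System.out.println(hosts); // prints [hostB=30.0, hostC=20.0, hostA=10.0]
  }
}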
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
index 4cfe1b533a..bfbf516611 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/TotalHeapUsageEmulatorPlugin.java
@@ -19,6 +19,8 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
+
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.gridmix.Progressive;
 import org.apache.hadoop.tools.rumen.ResourceUsageMetrics;
@@ -129,7 +131,7 @@ public void initialize(ResourceCalculatorPlugin monitor,
   public static class DefaultHeapUsageEmulator
   implements HeapUsageEmulatorCore {
     // store the unit loads in a list
-    protected static final ArrayList<Object> heapSpace =
+    private static final ArrayList<Object> heapSpace =
       new ArrayList<Object>();
 
     /**
@@ -142,7 +144,17 @@ public void load(long sizeInMB) {
         heapSpace.add((Object)new byte[ONE_MB]);
       }
     }
-
+
+    /**
+     * Gets the total number of 1mb objects stored in the emulator.
+     *
+     * @return total number of 1mb objects.
+     */
+    @VisibleForTesting
+    public int getHeapSpaceSize() {
+      return heapSpace.size();
+    }
+
     /**
      * This will initialize the core and check if the core can emulate the
      * desired target on the underlying hardware.
diff --git a/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java b/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
index 7ec3c93d36..d79c010104 100644
--- a/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
+++ b/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/TestGridmixMemoryEmulation.java
@@ -58,7 +58,7 @@ int getNumCalls() {
 
     // Get the total number of 1mb objects stored within
     long getHeapUsageInMB() {
-      return heapSpace.size();
+      return getHeapSpaceSize();
     }
 
     @Override
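For context on the two hunks above: tests stop reading the heapSpace field directly and go through the new @VisibleForTesting accessor instead. A hypothetical JUnit 4 sketch of that usage (the test class below is illustrative only and not part of the patch; it assumes DefaultHeapUsageEmulator is instantiable and load() is public, as the hunks suggest):

import org.apache.hadoop.mapred.gridmix.emulators.resourceusage.TotalHeapUsageEmulatorPlugin.DefaultHeapUsageEmulator;
import org.junit.Assert;
import org.junit.Test;

public class HeapAccessorSketch {

  @Test
  public void loadIsVisibleThroughAccessor() {
    DefaultHeapUsageEmulator core = new DefaultHeapUsageEmulator();
    // heapSpace is static, so compare against the size seen before loading.
    int before = core.getHeapSpaceSize();
    core.load(5); // allocates five 1 MB units
    Assert.assertEquals(before + 5, core.getHeapSpaceSize());
  }
}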
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
index 7547eca231..1c92caf987 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
@@ -130,7 +130,7 @@ public DataType<?> parseJobProperty(String key, String value) {
   /**
    * Extracts the -Xmx heap option from the specified string.
    */
-  public static void extractMaxHeapOpts(String javaOptions,
+  public static void extractMaxHeapOpts(final String javaOptions,
                                         List<String> heapOpts,
                                         List<String> others) {
     for (String opt : javaOptions.split(" ")) {
@@ -160,6 +160,7 @@ public static void extractMinHeapOpts(String javaOptions,
 
   // Maps the value of the specified key.
   private DataType<?> fromString(String key, String value) {
+    DefaultDataType defaultValue = new DefaultDataType(value);
     if (value != null) {
       // check known configs
       //  job-name
@@ -190,14 +191,13 @@ private DataType<?> fromString(String key, String value) {
       // check if the config parameter represents a number
       try {
         format.parse(value);
-        return new DefaultDataType(value);
+        return defaultValue;
       } catch (ParseException pe) {}
 
       // check if the config parameters represents a boolean
       // avoiding exceptions
       if ("true".equals(value) || "false".equals(value)) {
-        Boolean.parseBoolean(value);
-        return new DefaultDataType(value);
+        return defaultValue;
       }
 
       // check if the config parameter represents a class
@@ -208,7 +208,7 @@ private DataType<?> fromString(String key, String value) {
       // handle distributed cache sizes and timestamps
       if (latestKey.endsWith("sizes")
           || latestKey.endsWith(".timestamps")) {
-        new DefaultDataType(value);
+        return defaultValue;
       }
 
       // check if the config parameter represents a file-system path
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
index ddd35ef475..03281a5f54 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
@@ -32,6 +32,7 @@
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
+import java.util.Collections;
 import java.util.concurrent.ConcurrentHashMap;
 
 import com.fasterxml.jackson.core.JsonFactory;
@@ -113,7 +114,8 @@ public class SLSRunner extends Configured implements Tool {
   // other simulation information
   private int numNMs, numRacks, numAMs, numTasks;
   private long maxRuntime;
-  public final static Map<String, Object> simulateInfoMap =
+
+  private final static Map<String, Object> simulateInfoMap =
       new HashMap<String, Object>();
 
   // logger
@@ -165,6 +167,13 @@ private void init(Configuration tempConf) throws ClassNotFoundException {
     }
   }
 
+  /**
+   * @return an unmodifiable view of the simulated info map.
+   */
+  public static Map<String, Object> getSimulateInfoMap() {
+    return Collections.unmodifiableMap(simulateInfoMap);
+  }
+
   public void setSimulationParams(TraceType inType, String[] inTraces,
       String nodes, String outDir, Set<String> trackApps,
       boolean printsimulation) throws IOException, ClassNotFoundException {
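The SLSRunner hunks above replace the public static map with a private field behind a read-only accessor. A hypothetical caller sketch (not part of the patch) showing how the unmodifiable view behaves:

import java.util.Map;

import org.apache.hadoop.yarn.sls.SLSRunner;

public class SimulateInfoReaderSketch {

  public static void main(String[] args) {
    Map<String, Object> info = SLSRunner.getSimulateInfoMap();

    // Reads work exactly as the old SLSRunner.simulateInfoMap access did.
    if (info.containsKey("Number of nodes")) {
      int numNMs = Integer.parseInt(info.get("Number of nodes").toString());
      System.out.println("nodes: " + numNMs);
    }

    // Writes are rejected: Collections.unmodifiableMap throws
    // UnsupportedOperationException on any mutation attempt.
    try {
      info.put("Number of nodes", 1);
    } catch (UnsupportedOperationException expected) {
      System.out.println("simulate info map is read-only");
    }
  }
}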
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FairSchedulerMetrics.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FairSchedulerMetrics.java
index 08362b1c60..7b306f0410 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FairSchedulerMetrics.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/FairSchedulerMetrics.java
@@ -180,16 +180,16 @@ public void trackQueue(String queueName) {
         new Gauge<Long>() {
           @Override
           public Long getValue() {
-            if (! maxReset &&
-                SLSRunner.simulateInfoMap.containsKey("Number of nodes") &&
-                SLSRunner.simulateInfoMap.containsKey("Node memory (MB)") &&
-                SLSRunner.simulateInfoMap.containsKey("Node VCores")) {
-              int numNMs = Integer.parseInt(
-                  SLSRunner.simulateInfoMap.get("Number of nodes").toString());
-              int numMemoryMB = Integer.parseInt(
-                  SLSRunner.simulateInfoMap.get("Node memory (MB)").toString());
-              int numVCores = Integer.parseInt(
-                  SLSRunner.simulateInfoMap.get("Node VCores").toString());
+            if (!maxReset
+                && SLSRunner.getSimulateInfoMap().containsKey("Number of nodes")
+                && SLSRunner.getSimulateInfoMap().containsKey("Node memory (MB)")
+                && SLSRunner.getSimulateInfoMap().containsKey("Node VCores")) {
+              int numNMs = Integer.parseInt(SLSRunner.getSimulateInfoMap()
+                  .get("Number of nodes").toString());
+              int numMemoryMB = Integer.parseInt(SLSRunner.getSimulateInfoMap()
+                  .get("Node memory (MB)").toString());
+              int numVCores = Integer.parseInt(SLSRunner.getSimulateInfoMap()
+                  .get("Node VCores").toString());
               totalMemoryMB = numNMs * numMemoryMB;
               totalVCores = numNMs * numVCores;
 
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
index 29bbe1a6d7..3daadcabd7 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java
@@ -187,14 +187,14 @@ private void printPageIndex(HttpServletRequest request,
     response.setStatus(HttpServletResponse.SC_OK);
 
     String simulateInfo;
-    if (SLSRunner.simulateInfoMap.isEmpty()) {
+    if (SLSRunner.getSimulateInfoMap().isEmpty()) {
       String empty = "<tr><td colspan='2' align='center'>"
          + "No information available</td></tr>";
       simulateInfo = MessageFormat.format(simulateInfoTemplate, empty);
     } else {
       StringBuilder info = new StringBuilder();
       for (Map.Entry<String, Object> entry :
-          SLSRunner.simulateInfoMap.entrySet()) {
+          SLSRunner.getSimulateInfoMap().entrySet()) {
         info.append("<tr>");
         info.append("<td class='td1'>").append(entry.getKey()).append("</td>");
         info.append("<td class='td2'>").append(entry.getValue())
diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java
index 1c1e63cfd2..c9be450fee 100644
--- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java
+++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java
@@ -20,7 +20,6 @@
 import org.junit.Assert;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.yarn.sls.SLSRunner;
 import org.junit.Test;
 
 import java.io.File;
@@ -28,6 +27,7 @@
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.HashMap;
 
 public class TestSLSWebApp {
 
@@ -36,20 +36,21 @@ public void testSimulateInfoPageHtmlTemplate() throws Exception {
     String simulateInfoTemplate = FileUtils.readFileToString(
         new File("src/main/html/simulate.info.html.template"));
 
-    SLSRunner.simulateInfoMap.put("Number of racks", 10);
-    SLSRunner.simulateInfoMap.put("Number of nodes", 100);
-    SLSRunner.simulateInfoMap.put("Node memory (MB)", 1024);
-    SLSRunner.simulateInfoMap.put("Node VCores", 1);
-    SLSRunner.simulateInfoMap.put("Number of applications", 100);
-    SLSRunner.simulateInfoMap.put("Number of tasks", 1000);
-    SLSRunner.simulateInfoMap.put("Average tasks per applicaion", 10);
-    SLSRunner.simulateInfoMap.put("Number of queues", 4);
-    SLSRunner.simulateInfoMap.put("Average applications per queue", 25);
-    SLSRunner.simulateInfoMap.put("Estimated simulate time (s)", 10000);
+    Map<String, Object> simulateInfoMap = new HashMap<>();
+    simulateInfoMap.put("Number of racks", 10);
+    simulateInfoMap.put("Number of nodes", 100);
+    simulateInfoMap.put("Node memory (MB)", 1024);
+    simulateInfoMap.put("Node VCores", 1);
+    simulateInfoMap.put("Number of applications", 100);
+    simulateInfoMap.put("Number of tasks", 1000);
+    simulateInfoMap.put("Average tasks per applicaion", 10);
+    simulateInfoMap.put("Number of queues", 4);
+    simulateInfoMap.put("Average applications per queue", 25);
+    simulateInfoMap.put("Estimated simulate time (s)", 10000);
 
     StringBuilder info = new StringBuilder();
     for (Map.Entry<String, Object> entry :
-        SLSRunner.simulateInfoMap.entrySet()) {
+        simulateInfoMap.entrySet()) {
       info.append("<tr>");
       info.append("<td class='td1'>" + entry.getKey() + "</td>");
       info.append("<td class='td2'>" + entry.getValue() + "</td>");
@@ -60,8 +61,7 @@ public void testSimulateInfoPageHtmlTemplate() throws Exception {
         MessageFormat.format(simulateInfoTemplate, info.toString());
     Assert.assertTrue("The simulate info html page should not be empty",
         simulateInfo.length() > 0);
-    for (Map.Entry<String, Object> entry :
-        SLSRunner.simulateInfoMap.entrySet()) {
+    for (Map.Entry<String, Object> entry : simulateInfoMap.entrySet()) {
       Assert.assertTrue("The simulate info html page should have information "
           + "of " + entry.getKey(), simulateInfo.contains("<td class='td1'>"
          + entry.getKey() + ""