diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java deleted file mode 100644 index 9d051dfde4..0000000000 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.mapred; - -import java.io.IOException; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser; -import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; - -class JobHistoryFileParser { - private static final Log LOG = LogFactory.getLog(JobHistoryFileParser.class); - - private final FileSystem fs; - - public JobHistoryFileParser(FileSystem fs) { - LOG.info("JobHistoryFileParser created with " + fs); - this.fs = fs; - } - - public JobInfo parseHistoryFile(Path path) throws IOException { - LOG.info("parsing job history file " + path); - JobHistoryParser parser = new JobHistoryParser(fs, path); - return parser.parse(); - } - - public Configuration parseConfiguration(Path path) throws IOException { - LOG.info("parsing job configuration file " + path); - Configuration conf = new Configuration(false); - conf.addResource(fs.open(path)); - return conf; - } -} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineServicePerformanceV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineServicePerformanceV2.java deleted file mode 100644 index f674ae1be0..0000000000 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineServicePerformanceV2.java +++ /dev/null @@ -1,229 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.mapred; - -import java.io.IOException; -import java.util.Date; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.SleepJob.SleepInputFormat; -import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.apache.hadoop.util.GenericOptionsParser; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager; - -public class TimelineServicePerformanceV2 extends Configured implements Tool { - static final int NUM_MAPS_DEFAULT = 1; - - static final int SIMPLE_ENTITY_WRITER = 1; - static final int JOB_HISTORY_FILE_REPLAY_MAPPER = 2; - static int mapperType = SIMPLE_ENTITY_WRITER; - - protected static int printUsage() { - System.err.println( - "Usage: [-m <maps>] number of mappers (default: " + NUM_MAPS_DEFAULT + - ")\n" + - " [-mtype <mapper type in integer>]\n" + - " 1. simple entity write mapper\n" + - " 2. job history file replay mapper\n" + - " [-s <(KBs)test>] number of KB per put (mtype=1, default: " + - SimpleEntityWriter.KBS_SENT_DEFAULT + " KB)\n" + - " [-t] package sending iterations per mapper (mtype=1, default: " + - SimpleEntityWriter.TEST_TIMES_DEFAULT + ")\n" + - " [-d <path>] root path of job history files (mtype=2)\n" + - " [-r <replay mode>] (mtype=2)\n" + - " 1. write all entities for a job in one put (default)\n" + - " 2. write one entity at a time\n"); - GenericOptionsParser.printGenericCommandUsage(System.err); - return -1; - } - - /** - * Configure a job given argv. 
- */ - public static boolean parseArgs(String[] args, Job job) throws IOException { - // set the common defaults - Configuration conf = job.getConfiguration(); - conf.setInt(MRJobConfig.NUM_MAPS, NUM_MAPS_DEFAULT); - - for (int i = 0; i < args.length; i++) { - if (args.length == i + 1) { - System.out.println("ERROR: Required parameter missing from " + args[i]); - return printUsage() == 0; - } - try { - if ("-m".equals(args[i])) { - if (Integer.parseInt(args[++i]) > 0) { - job.getConfiguration() - .setInt(MRJobConfig.NUM_MAPS, Integer.parseInt(args[i])); - } - } else if ("-mtype".equals(args[i])) { - mapperType = Integer.parseInt(args[++i]); - } else if ("-s".equals(args[i])) { - if (Integer.parseInt(args[++i]) > 0) { - conf.setInt(SimpleEntityWriter.KBS_SENT, Integer.parseInt(args[i])); - } - } else if ("-t".equals(args[i])) { - if (Integer.parseInt(args[++i]) > 0) { - conf.setInt(SimpleEntityWriter.TEST_TIMES, - Integer.parseInt(args[i])); - } - } else if ("-d".equals(args[i])) { - conf.set(JobHistoryFileReplayMapper.PROCESSING_PATH, args[++i]); - } else if ("-r".equals(args[i])) { - conf.setInt(JobHistoryFileReplayMapper.REPLAY_MODE, - Integer.parseInt(args[++i])); - } else { - System.out.println("Unexpected argument: " + args[i]); - return printUsage() == 0; - } - } catch (NumberFormatException except) { - System.out.println("ERROR: Integer expected instead of " + args[i]); - return printUsage() == 0; - } catch (Exception e) { - throw (IOException)new IOException().initCause(e); - } - } - - // handle mapper-specific settings - switch (mapperType) { - case JOB_HISTORY_FILE_REPLAY_MAPPER: - job.setMapperClass(JobHistoryFileReplayMapper.class); - String processingPath = - conf.get(JobHistoryFileReplayMapper.PROCESSING_PATH); - if (processingPath == null || processingPath.isEmpty()) { - System.out.println("processing path is missing while mtype = 2"); - return printUsage() == 0; - } - break; - case SIMPLE_ENTITY_WRITER: - default: - job.setMapperClass(SimpleEntityWriter.class); - // use the current timestamp as the "run id" of the test: this will - // be used as simulating the cluster timestamp for apps - conf.setLong(SimpleEntityWriter.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, - System.currentTimeMillis()); - break; - } - - return true; - } - - /** - * TimelineServer Performance counters - */ - static enum PerfCounters { - TIMELINE_SERVICE_WRITE_TIME, - TIMELINE_SERVICE_WRITE_COUNTER, - TIMELINE_SERVICE_WRITE_FAILURES, - TIMELINE_SERVICE_WRITE_KBS, - } - - public int run(String[] args) throws Exception { - - Job job = Job.getInstance(getConf()); - job.setJarByClass(TimelineServicePerformanceV2.class); - job.setMapperClass(SimpleEntityWriter.class); - job.setInputFormatClass(SleepInputFormat.class); - job.setOutputFormatClass(NullOutputFormat.class); - job.setNumReduceTasks(0); - if (!parseArgs(args, job)) { - return -1; - } - - Date startTime = new Date(); - System.out.println("Job started: " + startTime); - int ret = job.waitForCompletion(true) ? 
0 : 1; - org.apache.hadoop.mapreduce.Counters counters = job.getCounters(); - long writetime = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).getValue(); - long writecounts = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).getValue(); - long writesize = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).getValue(); - double transacrate = writecounts * 1000 / (double)writetime; - double iorate = writesize * 1000 / (double)writetime; - int numMaps = - Integer.parseInt(job.getConfiguration().get(MRJobConfig.NUM_MAPS)); - - System.out.println("TRANSACTION RATE (per mapper): " + transacrate + - " ops/s"); - System.out.println("IO RATE (per mapper): " + iorate + " KB/s"); - - System.out.println("TRANSACTION RATE (total): " + transacrate*numMaps + - " ops/s"); - System.out.println("IO RATE (total): " + iorate*numMaps + " KB/s"); - - return ret; - } - - public static void main(String[] args) throws Exception { - int res = - ToolRunner.run(new Configuration(), new TimelineServicePerformanceV2(), - args); - System.exit(res); - } - - /** - * To ensure that the compression really gets exercised, generate a - * random alphanumeric fixed length payload - */ - static final char[] alphaNums = new char[] { 'a', 'b', 'c', 'd', 'e', 'f', - 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', - 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', - 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', - 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', - '3', '4', '5', '6', '7', '8', '9', '0', ' ' }; - - /** - * Base mapper for writing entities to the timeline service. Subclasses - * override {@link #writeEntities(Configuration, TimelineCollectorManager, - * org.apache.hadoop.mapreduce.Mapper.Context)} to create and write entities - * to the timeline service. - */ - public static abstract class EntityWriter - extends org.apache.hadoop.mapreduce.Mapper - <IntWritable, IntWritable, Writable, Writable> { - @Override - public void map(IntWritable key, IntWritable val, Context context) - throws IOException { - - // create the timeline collector manager wired with the writer - Configuration tlConf = new YarnConfiguration(); - TimelineCollectorManager manager = new TimelineCollectorManager("test"); - manager.init(tlConf); - manager.start(); - try { - // invoke the method to have the subclass write entities - writeEntities(tlConf, manager, context); - } finally { - manager.close(); - } - } - - protected abstract void writeEntities(Configuration tlConf, - TimelineCollectorManager manager, Context context) throws IOException; - } -} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java new file mode 100644 index 0000000000..f5d95c3a40 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.mapreduce; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager; + +/** + * Base mapper for writing entities to the timeline service. Subclasses + * override {@link #writeEntities(Configuration, TimelineCollectorManager, + * org.apache.hadoop.mapreduce.Mapper.Context)} to create and write entities + * to the timeline service. + */ +abstract class EntityWriterV2 + extends org.apache.hadoop.mapreduce.Mapper + <IntWritable, IntWritable, Writable, Writable> { + @Override + public void map(IntWritable key, IntWritable val, Context context) + throws IOException { + + // create the timeline collector manager wired with the writer + Configuration tlConf = new YarnConfiguration(); + TimelineCollectorManager manager = new TimelineCollectorManager("test"); + manager.init(tlConf); + manager.start(); + try { + // invoke the method to have the subclass write entities + writeEntities(tlConf, manager, context); + } finally { + manager.close(); + } + } + + protected abstract void writeEntities(Configuration tlConf, + TimelineCollectorManager manager, Context context) throws IOException; +} \ No newline at end of file diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java index 5e106622f5..447ea4e6b4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java @@ -20,33 +20,21 @@ import java.io.IOException; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapreduce.Mapper.Context; -import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters; -import org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper; import org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.TypeConverter; +import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters; import 
org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileReplayMapper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java similarity index 51% rename from hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileReplayMapper.java rename to hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java index 4fb5308084..6a9a878c73 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileReplayMapper.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java @@ -16,28 +16,19 @@ * limitations under the License. */ -package org.apache.hadoop.mapred; +package org.apache.hadoop.mapreduce; import java.io.IOException; import java.util.Collection; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.LocatedFileStatus; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.RemoteIterator; -import org.apache.hadoop.mapred.TimelineServicePerformanceV2.EntityWriter; -import org.apache.hadoop.mapred.TimelineServicePerformanceV2.PerfCounters; -import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.TypeConverter; +import org.apache.hadoop.mapred.JobID; +import org.apache.hadoop.mapreduce.JobHistoryFileReplayHelper.JobFiles; +import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.security.UserGroupInformation; @@ -49,96 +40,24 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager; /** - * Mapper for TimelineServicePerformanceV2 that replays job history files to the - * timeline service. + * Mapper for TimelineServicePerformance that replays job history files to the + * timeline service v.2. 
* */ -class JobHistoryFileReplayMapper extends EntityWriter { +class JobHistoryFileReplayMapperV2 extends EntityWriterV2 { private static final Log LOG = - LogFactory.getLog(JobHistoryFileReplayMapper.class); - - static final String PROCESSING_PATH = "processing path"; - static final String REPLAY_MODE = "replay mode"; - static final int WRITE_ALL_AT_ONCE = 1; - static final int WRITE_PER_ENTITY = 2; - static final int REPLAY_MODE_DEFAULT = WRITE_ALL_AT_ONCE; - - private static final Pattern JOB_ID_PARSER = - Pattern.compile("^(job_[0-9]+_([0-9]+)).*"); - - public static class JobFiles { - private final String jobId; - private Path jobHistoryFilePath; - private Path jobConfFilePath; - - public JobFiles(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public Path getJobHistoryFilePath() { - return jobHistoryFilePath; - } - - public void setJobHistoryFilePath(Path jobHistoryFilePath) { - this.jobHistoryFilePath = jobHistoryFilePath; - } - - public Path getJobConfFilePath() { - return jobConfFilePath; - } - - public void setJobConfFilePath(Path jobConfFilePath) { - this.jobConfFilePath = jobConfFilePath; - } - - @Override - public int hashCode() { - return jobId.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - JobFiles other = (JobFiles) obj; - return jobId.equals(other.jobId); - } - } - - private enum FileType { JOB_HISTORY_FILE, JOB_CONF_FILE, UNKNOWN } - + LogFactory.getLog(JobHistoryFileReplayMapperV2.class); @Override protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException { - // collect the apps it needs to process - Configuration conf = context.getConfiguration(); - int taskId = context.getTaskAttemptID().getTaskID().getId(); - int size = conf.getInt(MRJobConfig.NUM_MAPS, - TimelineServicePerformanceV2.NUM_MAPS_DEFAULT); - String processingDir = - conf.get(JobHistoryFileReplayMapper.PROCESSING_PATH); - int replayMode = - conf.getInt(JobHistoryFileReplayMapper.REPLAY_MODE, - JobHistoryFileReplayMapper.REPLAY_MODE_DEFAULT); - Path processingPath = new Path(processingDir); - FileSystem processingFs = processingPath.getFileSystem(conf); - JobHistoryFileParser parser = new JobHistoryFileParser(processingFs); - TimelineEntityConverter converter = new TimelineEntityConverter(); + JobHistoryFileReplayHelper helper = new JobHistoryFileReplayHelper(context); + int replayMode = helper.getReplayMode(); + JobHistoryFileParser parser = helper.getParser(); + TimelineEntityConverterV2 converter = new TimelineEntityConverterV2(); - Collection<JobFiles> jobs = - selectJobFiles(processingFs, processingPath, taskId, size); + // collect the apps it needs to process + Collection<JobFiles> jobs = helper.getJobFiles(); if (jobs.isEmpty()) { LOG.info(context.getTaskAttemptID().getTaskID() + " will process no jobs"); @@ -149,6 +68,13 @@ protected void writeEntities(Configuration tlConf, for (JobFiles job: jobs) { // process each job String jobIdStr = job.getJobId(); + // skip if either of the files is missing + if (job.getJobConfFilePath() == null || + job.getJobHistoryFilePath() == null) { + LOG.info(jobIdStr + " missing either the job history file or the " + + "configuration file. 
Skipping."); + continue; + } LOG.info("processing " + jobIdStr + "..."); JobId jobId = TypeConverter.toYarn(JobID.forName(jobIdStr)); ApplicationId appId = jobId.getAppId(); @@ -184,10 +110,10 @@ protected void writeEntities(Configuration tlConf, long startWrite = System.nanoTime(); try { switch (replayMode) { - case JobHistoryFileReplayMapper.WRITE_ALL_AT_ONCE: + case JobHistoryFileReplayHelper.WRITE_ALL_AT_ONCE: writeAllEntities(collector, entitySet, ugi); break; - case JobHistoryFileReplayMapper.WRITE_PER_ENTITY: + case JobHistoryFileReplayHelper.WRITE_PER_ENTITY: writePerEntity(collector, entitySet, ugi); break; default: @@ -232,70 +158,4 @@ private void writePerEntity(AppLevelTimelineCollector collector, LOG.info("wrote entity " + entity.getId()); } } - - private Collection selectJobFiles(FileSystem fs, - Path processingRoot, int i, int size) throws IOException { - Map jobs = new HashMap<>(); - RemoteIterator it = fs.listFiles(processingRoot, true); - while (it.hasNext()) { - LocatedFileStatus status = it.next(); - Path path = status.getPath(); - String fileName = path.getName(); - Matcher m = JOB_ID_PARSER.matcher(fileName); - if (!m.matches()) { - continue; - } - String jobId = m.group(1); - int lastId = Integer.parseInt(m.group(2)); - int mod = lastId % size; - if (mod != i) { - continue; - } - LOG.info("this mapper will process file " + fileName); - // it's mine - JobFiles jobFiles = jobs.get(jobId); - if (jobFiles == null) { - jobFiles = new JobFiles(jobId); - jobs.put(jobId, jobFiles); - } - setFilePath(fileName, path, jobFiles); - } - return jobs.values(); - } - - private void setFilePath(String fileName, Path path, - JobFiles jobFiles) { - // determine if we're dealing with a job history file or a job conf file - FileType type = getFileType(fileName); - switch (type) { - case JOB_HISTORY_FILE: - if (jobFiles.getJobHistoryFilePath() == null) { - jobFiles.setJobHistoryFilePath(path); - } else { - LOG.warn("we already have the job history file " + - jobFiles.getJobHistoryFilePath() + ": skipping " + path); - } - break; - case JOB_CONF_FILE: - if (jobFiles.getJobConfFilePath() == null) { - jobFiles.setJobConfFilePath(path); - } else { - LOG.warn("we already have the job conf file " + - jobFiles.getJobConfFilePath() + ": skipping " + path); - } - break; - case UNKNOWN: - LOG.warn("unknown type: " + path); - } - } - - private FileType getFileType(String fileName) { - if (fileName.endsWith(".jhist")) { - return FileType.JOB_HISTORY_FILE; - } - if (fileName.endsWith("_conf.xml")) { - return FileType.JOB_CONF_FILE; - } - return FileType.UNKNOWN; - } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java new file mode 100644 index 0000000000..b89d0e879b --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.mapreduce; + +/** + * Constants for simple entity writers. + */ +interface SimpleEntityWriterConstants { + // constants for mtype = 1 + String KBS_SENT = "kbs sent"; + int KBS_SENT_DEFAULT = 1; + String TEST_TIMES = "testtimes"; + int TEST_TIMES_DEFAULT = 100; + String TIMELINE_SERVICE_PERFORMANCE_RUN_ID = + "timeline.server.performance.run.id"; + + /** + * To ensure that the compression really gets exercised, generate a + * random alphanumeric fixed length payload + */ + char[] ALPHA_NUMS = new char[] { 'a', 'b', 'c', 'd', 'e', 'f', + 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', + 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', + 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', + 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', + '3', '4', '5', '6', '7', '8', '9', '0', ' ' }; +} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java index 2c851e9586..b10ae049f8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java @@ -27,44 +27,22 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters; -import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl; -import org.apache.hadoop.yarn.conf.YarnConfiguration; /** * Adds simple entities with random string payload, events, metrics, and * configuration. 
*/ -class SimpleEntityWriterV1 extends - org.apache.hadoop.mapreduce.Mapper<IntWritable, IntWritable, Writable, Writable> { +class SimpleEntityWriterV1 + extends org.apache.hadoop.mapreduce.Mapper + <IntWritable, IntWritable, Writable, Writable> + implements SimpleEntityWriterConstants { private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class); - // constants for mtype = 1 - static final String KBS_SENT = "kbs sent"; - static final int KBS_SENT_DEFAULT = 1; - static final String TEST_TIMES = "testtimes"; - static final int TEST_TIMES_DEFAULT = 100; - static final String TIMELINE_SERVICE_PERFORMANCE_RUN_ID = - "timeline.server.performance.run.id"; - /** - * To ensure that the compression really gets exercised, generate a - * random alphanumeric fixed length payload - */ - private static char[] ALPHA_NUMS = new char[] { 'a', 'b', 'c', 'd', 'e', 'f', - 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', - 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', - 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', - 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', - '3', '4', '5', '6', '7', '8', '9', '0', ' ' }; - public void map(IntWritable key, IntWritable val, Context context) throws IOException { TimelineClient tlc = new TimelineClientImpl(); Configuration conf = context.getConfiguration(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SimpleEntityWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV2.java similarity index 85% rename from hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SimpleEntityWriter.java rename to hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV2.java index 625c32ae60..d66deb0e73 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/SimpleEntityWriter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV2.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.mapred; +package org.apache.hadoop.mapreduce; import java.io.IOException; import java.util.Random; @@ -25,9 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapred.TimelineServicePerformanceV2.EntityWriter; -import org.apache.hadoop.mapred.TimelineServicePerformanceV2.PerfCounters; -import org.apache.hadoop.mapreduce.TaskAttemptID; +import org.apache.hadoop.mapreduce.TimelineServicePerformance.PerfCounters; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities; @@ -39,19 +37,12 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager; /** - * Adds simple entities with random string payload, events, metrics, and - * configuration. 
- */ -class SimpleEntityWriter extends EntityWriter { - private static final Log LOG = LogFactory.getLog(SimpleEntityWriter.class); - - // constants for mtype = 1 - static final String KBS_SENT = "kbs sent"; - static final int KBS_SENT_DEFAULT = 1; - static final String TEST_TIMES = "testtimes"; - static final int TEST_TIMES_DEFAULT = 100; - static final String TIMELINE_SERVICE_PERFORMANCE_RUN_ID = - "timeline.server.performance.run.id"; + * Adds simple entities with random string payload, events, metrics, and + * configuration. + */ +class SimpleEntityWriterV2 extends EntityWriterV2 + implements SimpleEntityWriterConstants { + private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV2.class); protected void writeEntities(Configuration tlConf, TimelineCollectorManager manager, Context context) throws IOException { @@ -87,8 +78,8 @@ protected void writeEntities(Configuration tlConf, // Generate a fixed length random payload for (int xx = 0; xx < kbs * 1024; xx++) { int alphaNumIdx = - rand.nextInt(TimelineServicePerformanceV2.alphaNums.length); - payLoad[xx] = TimelineServicePerformanceV2.alphaNums[alphaNumIdx]; + rand.nextInt(ALPHA_NUMS.length); + payLoad[xx] = ALPHA_NUMS[alphaNumIdx]; } String entId = taskAttemptId + "_" + Integer.toString(i); final TimelineEntity entity = new TimelineEntity(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java index 79d123eb53..4d8b74bd7b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java @@ -25,11 +25,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.CounterGroup; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java similarity index 98% rename from hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java rename to hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java index 0e2eb7218e..79633d2835 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TimelineEntityConverter.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.mapred; +package org.apache.hadoop.mapreduce; import java.util.ArrayList; import java.util.HashSet; @@ -38,9 +38,9 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric; -class TimelineEntityConverter { +class TimelineEntityConverterV2 { private static final Log LOG = - LogFactory.getLog(TimelineEntityConverter.class); + LogFactory.getLog(TimelineEntityConverterV2.class); static final String JOB = "MAPREDUCE_JOB"; static final String TASK = "MAPREDUCE_TASK"; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java index 0753d7fa35..1a14137bd4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java @@ -23,8 +23,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.SleepJob.SleepInputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; @@ -46,15 +44,19 @@ protected static int printUsage() { System.err.println( "Usage: [-m <maps>] number of mappers (default: " + NUM_MAPS_DEFAULT + ")\n" + - " [-v] timeline service version\n" + - " [-mtype <mapper type in integer>]\n" + - " 1. simple entity write mapper (default)\n" + + " [-v] timeline service version (default: " + + TIMELINE_SERVICE_VERSION_1 + ")\n" + + " 1. version 1.x\n" + + " 2. version 2.x\n" + + " [-mtype <mapper type in integer>] (default: " + + SIMPLE_ENTITY_WRITER + ")\n" + + " 1. simple entity write mapper\n" + " 2. jobhistory files replay mapper\n" + " [-s <(KBs)test>] number of KB per put (mtype=1, default: " + - SimpleEntityWriterV1.KBS_SENT_DEFAULT + " KB)\n" + + SimpleEntityWriterConstants.KBS_SENT_DEFAULT + " KB)\n" + " [-t] package sending iterations per mapper (mtype=1, default: " + - SimpleEntityWriterV1.TEST_TIMES_DEFAULT + ")\n" + - " [-d <path>] root path of job history files (mtype=2)\n" + + SimpleEntityWriterConstants.TEST_TIMES_DEFAULT + ")\n" + + " [-d <path>] hdfs root path of job history files (mtype=2)\n" + " [-r <replay mode>] (mtype=2)\n" + " 1. write all entities for a job in one put (default)\n" + " 2. 
write one entity at a time\n"); @@ -78,8 +80,7 @@ public static boolean parseArgs(String[] args, Job job) throws IOException { try { if ("-v".equals(args[i])) { timeline_service_version = Integer.parseInt(args[++i]); - } - if ("-m".equals(args[i])) { + } else if ("-m".equals(args[i])) { if (Integer.parseInt(args[++i]) > 0) { job.getConfiguration() .setInt(MRJobConfig.NUM_MAPS, Integer.parseInt(args[i])); @@ -88,11 +89,12 @@ public static boolean parseArgs(String[] args, Job job) throws IOException { mapperType = Integer.parseInt(args[++i]); } else if ("-s".equals(args[i])) { if (Integer.parseInt(args[++i]) > 0) { - conf.setInt(SimpleEntityWriterV1.KBS_SENT, Integer.parseInt(args[i])); + conf.setInt(SimpleEntityWriterConstants.KBS_SENT, + Integer.parseInt(args[i])); } } else if ("-t".equals(args[i])) { if (Integer.parseInt(args[++i]) > 0) { - conf.setInt(SimpleEntityWriterV1.TEST_TIMES, + conf.setInt(SimpleEntityWriterConstants.TEST_TIMES, Integer.parseInt(args[i])); } } else if ("-d".equals(args[i])) { @@ -113,28 +115,40 @@ public static boolean parseArgs(String[] args, Job job) throws IOException { } // handle mapper-specific settings - switch (timeline_service_version) { - case TIMELINE_SERVICE_VERSION_1: - default: - switch (mapperType) { - case JOB_HISTORY_FILE_REPLAY_MAPPER: - job.setMapperClass(JobHistoryFileReplayMapperV1.class); - String processingPath = - conf.get(JobHistoryFileReplayHelper.PROCESSING_PATH); - if (processingPath == null || processingPath.isEmpty()) { - System.out.println("processing path is missing while mtype = 2"); - return printUsage() == 0; - } + switch (mapperType) { + case JOB_HISTORY_FILE_REPLAY_MAPPER: + String processingPath = + conf.get(JobHistoryFileReplayHelper.PROCESSING_PATH); + if (processingPath == null || processingPath.isEmpty()) { + System.out.println("processing path is missing while mtype = 2"); + return printUsage() == 0; + } + switch (timeline_service_version) { + case TIMELINE_SERVICE_VERSION_2: + job.setMapperClass(JobHistoryFileReplayMapperV2.class); break; - case SIMPLE_ENTITY_WRITER: + case TIMELINE_SERVICE_VERSION_1: default: - job.setMapperClass(SimpleEntityWriterV1.class); - // use the current timestamp as the "run id" of the test: this will - // be used as simulating the cluster timestamp for apps - conf.setLong(SimpleEntityWriterV1.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, - System.currentTimeMillis()); + job.setMapperClass(JobHistoryFileReplayMapperV1.class); break; } + break; + case SIMPLE_ENTITY_WRITER: + default: + // use the current timestamp as the "run id" of the test: this will + // be used as simulating the cluster timestamp for apps + conf.setLong(SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, + System.currentTimeMillis()); + switch (timeline_service_version) { + case TIMELINE_SERVICE_VERSION_2: + job.setMapperClass(SimpleEntityWriterV2.class); + break; + case TIMELINE_SERVICE_VERSION_1: + default: + job.setMapperClass(SimpleEntityWriterV1.class); + break; + } + break; } return true; } @@ -164,25 +178,46 @@ public int run(String[] args) throws Exception { Date startTime = new Date(); System.out.println("Job started: " + startTime); int ret = job.waitForCompletion(true) ? 
0 : 1; - org.apache.hadoop.mapreduce.Counters counters = job.getCounters(); - long writetime = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME).getValue(); - long writecounts = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).getValue(); - long writesize = - counters.findCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS).getValue(); - double transacrate = writecounts * 1000 / (double)writetime; - double iorate = writesize * 1000 / (double)writetime; - int numMaps = - Integer.parseInt(job.getConfiguration().get(MRJobConfig.NUM_MAPS)); + if (job.isSuccessful()) { + org.apache.hadoop.mapreduce.Counters counters = job.getCounters(); + long writecounts = + counters.findCounter( + PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER).getValue(); + long writefailures = + counters.findCounter( + PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES).getValue(); + if (writefailures > 0 && writefailures == writecounts) { + // see if we have a complete failure to write + System.out.println("Job failed: all writes failed!"); + } else { + long writetime = + counters.findCounter( + PerfCounters.TIMELINE_SERVICE_WRITE_TIME).getValue(); + long writesize = + counters.findCounter( + PerfCounters.TIMELINE_SERVICE_WRITE_KBS).getValue(); + if (writetime == 0L) { + // see if write time is zero (normally shouldn't happen) + System.out.println("Job failed: write time is 0!"); + } else { + double transacrate = writecounts * 1000 / (double)writetime; + double iorate = writesize * 1000 / (double)writetime; + int numMaps = + Integer.parseInt( + job.getConfiguration().get(MRJobConfig.NUM_MAPS)); - System.out.println("TRANSACTION RATE (per mapper): " + transacrate + - " ops/s"); - System.out.println("IO RATE (per mapper): " + iorate + " KB/s"); + System.out.println("TRANSACTION RATE (per mapper): " + transacrate + + " ops/s"); + System.out.println("IO RATE (per mapper): " + iorate + " KB/s"); - System.out.println("TRANSACTION RATE (total): " + transacrate*numMaps + - " ops/s"); - System.out.println("IO RATE (total): " + iorate*numMaps + " KB/s"); + System.out.println("TRANSACTION RATE (total): " + + transacrate*numMaps + " ops/s"); + System.out.println("IO RATE (total): " + iorate*numMaps + " KB/s"); + } + } + } else { + System.out.println("Job failed: " + job.getStatus().getFailureInfo()); + } return ret; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java index 92e0b14c6c..4c42bd3f09 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java @@ -18,6 +18,19 @@ package org.apache.hadoop.test; +import org.apache.hadoop.fs.DFSCIOTest; +import org.apache.hadoop.fs.DistributedFSCheck; +import org.apache.hadoop.fs.JHLogAnalyzer; +import org.apache.hadoop.fs.TestDFSIO; +import org.apache.hadoop.fs.TestFileSystem; +import org.apache.hadoop.fs.loadGenerator.DataGenerator; +import org.apache.hadoop.fs.loadGenerator.LoadGenerator; +import org.apache.hadoop.fs.loadGenerator.LoadGeneratorMR; +import org.apache.hadoop.fs.loadGenerator.StructureGenerator; +import org.apache.hadoop.fs.slive.SliveTest; +import 
org.apache.hadoop.hdfs.NNBench; +import org.apache.hadoop.hdfs.NNBenchWithoutMR; +import org.apache.hadoop.io.FileBench; import org.apache.hadoop.io.TestSequenceFile; import org.apache.hadoop.mapred.BigMapOutput; import org.apache.hadoop.mapred.GenericMRLoadGenerator; @@ -28,28 +41,13 @@ import org.apache.hadoop.mapred.TestSequenceFileInputFormat; import org.apache.hadoop.mapred.TestTextInputFormat; import org.apache.hadoop.mapred.ThreadedMapBenchmark; -import org.apache.hadoop.mapreduce.TimelineServicePerformance; -import org.apache.hadoop.mapred.TimelineServicePerformanceV2; import org.apache.hadoop.mapreduce.FailJob; import org.apache.hadoop.mapreduce.LargeSorter; import org.apache.hadoop.mapreduce.MiniHadoopClusterManager; import org.apache.hadoop.mapreduce.SleepJob; +import org.apache.hadoop.mapreduce.TimelineServicePerformance; import org.apache.hadoop.util.ProgramDriver; -import org.apache.hadoop.hdfs.NNBench; -import org.apache.hadoop.hdfs.NNBenchWithoutMR; -import org.apache.hadoop.fs.TestFileSystem; -import org.apache.hadoop.fs.TestDFSIO; -import org.apache.hadoop.fs.DFSCIOTest; -import org.apache.hadoop.fs.DistributedFSCheck; -import org.apache.hadoop.io.FileBench; -import org.apache.hadoop.fs.JHLogAnalyzer; -import org.apache.hadoop.fs.loadGenerator.DataGenerator; -import org.apache.hadoop.fs.loadGenerator.LoadGenerator; -import org.apache.hadoop.fs.loadGenerator.LoadGeneratorMR; -import org.apache.hadoop.fs.loadGenerator.StructureGenerator; -import org.apache.hadoop.fs.slive.SliveTest; - /** * Driver for Map-reduce tests. * @@ -93,9 +91,8 @@ public MapredTestDriver(ProgramDriver pgd) { pgd.addClass("sleep", SleepJob.class, "A job that sleeps at each map and reduce task."); pgd.addClass("timelineperformance", TimelineServicePerformance.class, - "A job that launches mappers to test timlineserver performance."); - pgd.addClass("timelineperformance", TimelineServicePerformanceV2.class, - "A job that launch mappers to test timline service v.2 performance."); + "A job that launches mappers to test timeline service " + + "performance."); pgd.addClass("nnbench", NNBench.class, "A benchmark that stresses the namenode w/ MR."); pgd.addClass("nnbenchWithoutMR", NNBenchWithoutMR.class,
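
Note (not part of the patch): the net effect of this refactoring is that TimelineServicePerformance selects a mapper class by -v (timeline service version) and -mtype, and the v.2 mappers now share the EntityWriterV2 base class, which stands up a TimelineCollectorManager and delegates entity creation to writeEntities(). Below is a minimal sketch of what an additional v.2 mapper built on that base class could look like, mirroring the structure of SimpleEntityWriterV2 above; the class name MinimalEntityWriterV2 and the entity id/type values are hypothetical illustrations.

package org.apache.hadoop.mapreduce; // same package: EntityWriterV2 is package-private

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector;
import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager;

class MinimalEntityWriterV2 extends EntityWriterV2 {
  @Override
  protected void writeEntities(Configuration tlConf,
      TimelineCollectorManager manager, Context context) throws IOException {
    // register a per-app collector with the manager, as SimpleEntityWriterV2 does
    ApplicationId appId = ApplicationId.newInstance(
        System.currentTimeMillis(),
        context.getTaskAttemptID().getTaskID().getId());
    AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
    manager.putIfAbsent(appId, collector);
    try {
      // write a single trivial entity through the collector
      TimelineEntities entities = new TimelineEntities();
      TimelineEntity entity = new TimelineEntity();
      entity.setId("example_" + context.getTaskAttemptID());
      entity.setType("EXAMPLE_ENTITY");
      entities.addEntity(entity);
      collector.putEntities(entities, UserGroupInformation.getCurrentUser());
    } finally {
      // unregister the collector once this mapper is done with it
      manager.remove(appId);
    }
  }
}

Such a mapper would be wired in the same way the patch wires SimpleEntityWriterV2, i.e. via job.setMapperClass(...) in TimelineServicePerformance.parseArgs(), and the benchmark itself is run through the driver entry registered above, e.g. hadoop jar <jobclient tests jar> timelineperformance -v 2 -mtype 1 -m 4.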