MAPREDUCE-7098. Upgrade common-langs version to 3.7 in hadoop-mapreduce-project

Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
Takanobu Asanuma 2018-05-31 18:31:10 +09:00 committed by Akira Ajisaka
parent 02c4b89f99
commit d1e2b80980
22 changed files with 39 additions and 44 deletions
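
Aside from two API differences called out below (the renamed StringEscapeUtils methods and the NotImplementedException constructor), the change is a mechanical package rename: every org.apache.commons.lang import moves to its org.apache.commons.lang3 equivalent, and the now-unused provided-scope commons-lang dependency is dropped from the POM. A minimal before/after sketch of the rename (illustrative only; the class and file names below are not taken from this diff):

// Before (Commons Lang 2.x):
//   import org.apache.commons.lang.StringUtils;
//   import org.apache.commons.lang.time.FastDateFormat;

// After (Commons Lang 3.x):
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.FastDateFormat;

// Hypothetical class, for illustration only -- most call sites compile
// unchanged once the import is switched.
public class LangMigrationSketch {
  public static void main(String[] args) {
    System.out.println(StringUtils.isEmpty(""));   // prints: true
    System.out.println(FastDateFormat.getInstance("yyyy-MM-dd")
        .format(System.currentTimeMillis()));
  }
}

The Maven coordinates change as well, from commons-lang:commons-lang to org.apache.commons:commons-lang3, which is why the old artifact is removed from the POM at the end of this diff.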

View File

@@ -36,7 +36,7 @@
import java.util.regex.Pattern;
import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;

View File

@@ -40,7 +40,7 @@
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.test.UnitTestcaseTimeLimit;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.junit.Test;

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app.job.impl;
-import static org.apache.commons.lang.StringUtils.isEmpty;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
import java.io.IOException;
import java.net.InetAddress;

View File

@@ -25,7 +25,7 @@
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;

View File

@@ -27,7 +27,7 @@
import java.util.EnumSet;
import java.util.Collection;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -134,8 +134,8 @@ protected void render(Block html) {
.append(getAttemptId(taskId, ta)).append("\",\"")
.append(progress).append("\",\"")
.append(ta.getState().toString()).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(
-StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
+.append(StringEscapeUtils.escapeEcmaScript(
+StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
.append(nodeHttpAddr == null ? "N/A" :
"<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>"
@@ -151,8 +151,8 @@ protected void render(Block html) {
.append(ta.getStartTime()).append("\",\"")
.append(ta.getFinishTime()).append("\",\"")
.append(ta.getElapsedTime()).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
-diag)));
+.append(StringEscapeUtils.escapeEcmaScript(
+StringEscapeUtils.escapeHtml4(diag)));
if (enableUIActions) {
attemptsTableData.append("\",\"");
if (EnumSet.of(
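
Beyond the import, the only call-site change in these web UI blocks is the StringEscapeUtils rename: Commons Lang 3 dropped escapeJavaScript and escapeHtml in favor of escapeEcmaScript and escapeHtml4 (HTML 4.0 entities). A hedged sketch of the new pairing, using a made-up status string rather than the page's real TaskAttempt data:

import org.apache.commons.lang3.StringEscapeUtils;

// Illustration only; the class name and sample input are hypothetical.
public class EscapeRenameSketch {
  public static void main(String[] args) {
    String status = "<b>it's \"done\"</b>";
    // lang 2.x:  escapeJavaScript(escapeHtml(status))
    // lang3 3.7: escapeEcmaScript(escapeHtml4(status))
    String cellValue = StringEscapeUtils.escapeEcmaScript(
        StringEscapeUtils.escapeHtml4(status));
    System.out.println(cellValue);
  }
}

(StringEscapeUtils is deprecated in Lang 3.6+ in favor of Apache Commons Text, but it is still available in 3.7, so the rename alone is sufficient here.)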

View File

@@ -24,7 +24,7 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
@@ -103,8 +103,8 @@ public class TasksBlock extends HtmlBlock {
.append(join(pct, '%')).append("'> ").append("<div class='")
.append(C_PROGRESSBAR_VALUE).append("' style='")
.append(join("width:", pct, '%')).append("'> </div> </div>\",\"")
-.append(StringEscapeUtils.escapeJavaScript(
-StringEscapeUtils.escapeHtml(info.getStatus()))).append("\",\"")
+.append(StringEscapeUtils.escapeEcmaScript(
+StringEscapeUtils.escapeHtml4(info.getStatus()))).append("\",\"")
.append(info.getState()).append("\",\"")
.append(info.getStartTime()).append("\",\"")

View File

@@ -22,7 +22,7 @@
import java.io.IOException;
import java.util.Iterator;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL;

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapreduce.checkpoint;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
/**
* Simple naming service that generates a random checkpoint name.

View File

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapreduce.jobhistory;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapred.JobStatus;

View File

@@ -23,7 +23,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;

View File

@@ -22,7 +22,7 @@
import java.util.HashSet;
import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;

View File

@@ -31,7 +31,7 @@
import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@@ -22,7 +22,7 @@
import java.util.Arrays;
import java.util.Collections;
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;

View File

@@ -21,7 +21,7 @@
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -83,11 +83,11 @@ public class HsJobsBlock extends HtmlBlock {
.append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
.append("<a href='").append(url("job", job.getId())).append("'>")
.append(job.getId()).append("</a>\",\"")
-.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+.append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getName()))).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+.append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getUserName()))).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+.append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getQueueName()))).append("\",\"")
.append(job.getState()).append("\",\"")
.append(String.valueOf(job.getMapsTotal())).append("\",\"")

View File

@@ -29,7 +29,7 @@
import java.util.Collection;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -147,8 +147,8 @@ protected void render(Block html) {
attemptsTableData.append("[\"")
.append(getAttemptId(taskId, ta)).append("\",\"")
.append(ta.getState()).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(
-StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
+.append(StringEscapeUtils.escapeEcmaScript(
+StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
.append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
.append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
@@ -171,8 +171,8 @@
.append(elapsedReduceTime).append("\",\"");
}
attemptsTableData.append(attemptElapsed).append("\",\"")
-.append(StringEscapeUtils.escapeJavaScript(
-StringEscapeUtils.escapeHtml(ta.getNote())))
+.append(StringEscapeUtils.escapeEcmaScript(
+StringEscapeUtils.escapeHtml4(ta.getNote())))
.append("\"],\n");
}
//Remove the last comma and close off the array of arrays

View File

@@ -22,7 +22,7 @@
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.mapreduce.JobID;

View File

@@ -28,7 +28,7 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.RPC;

View File

@@ -23,7 +23,7 @@
import java.util.ArrayList;
import java.util.HashMap;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenResponse;
@@ -167,7 +167,7 @@ public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
public GetTaskAttemptReportResponse getTaskAttemptReport(
GetTaskAttemptReportRequest request) throws IOException {
//not invoked by anybody
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
}
@Override
@@ -222,26 +222,26 @@ public KillTaskAttemptResponse killTaskAttempt(
public GetDelegationTokenResponse getDelegationToken(
GetDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
}
@Override
public RenewDelegationTokenResponse renewDelegationToken(
RenewDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
}
@Override
public CancelDelegationTokenResponse cancelDelegationToken(
CancelDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
}
@Override
public InetSocketAddress getConnectAddress() {
/* Should not be invoked by anyone. Normally used to set token service */
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
}
}
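
The added message is required, not cosmetic: org.apache.commons.lang3.NotImplementedException has no zero-argument constructor in 3.7 (unlike the old org.apache.commons.lang class; a no-arg constructor only reappears in a later 3.x release), so every throw site must pass a message. A minimal sketch with hypothetical class and method names, not taken from this file:

import org.apache.commons.lang3.NotImplementedException;

// Illustration only; names here are made up.
public class NotImplementedSketch {
  public Object getDelegationTokenStub() {
    // throw new NotImplementedException();   // Lang 2.x style -- does not compile against lang3 3.7
    throw new NotImplementedException("Code is not implemented");
  }
}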

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.mapred;
-import static org.apache.commons.lang.StringUtils.isEmpty;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.hadoop.mapreduce.MRJobConfig.MR_AM_RESOURCE_PREFIX;
import java.io.IOException;

View File

@@ -29,7 +29,7 @@
import java.util.List;
import java.util.StringTokenizer;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;

View File

@@ -20,7 +20,7 @@
import java.io.IOException;
import java.lang.reflect.Field;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;

View File

@@ -142,11 +142,6 @@
<artifactId>commons-cli</artifactId>
<scope>provided</scope>
</dependency>
-<dependency>
-<groupId>commons-lang</groupId>
-<artifactId>commons-lang</artifactId>
-<scope>provided</scope>
-</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>