MAPREDUCE-7098. Upgrade commons-lang version to 3.7 in hadoop-mapreduce-project

Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
This commit is contained in:
Takanobu Asanuma 2018-05-31 18:31:10 +09:00 committed by Akira Ajisaka
parent 02c4b89f99
commit d1e2b80980
No known key found for this signature in database
GPG Key ID: C1EDBB9CA400FD50
22 changed files with 39 additions and 44 deletions

View File

@ -36,7 +36,7 @@
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.lang.SystemUtils; import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang.time.FastDateFormat; import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;

View File

@ -40,7 +40,7 @@
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.lang.time.FastDateFormat; import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.test.UnitTestcaseTimeLimit; import org.apache.hadoop.test.UnitTestcaseTimeLimit;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
import org.junit.Test; import org.junit.Test;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app.job.impl; package org.apache.hadoop.mapreduce.v2.app.job.impl;
import static org.apache.commons.lang.StringUtils.isEmpty; import static org.apache.commons.lang3.StringUtils.isEmpty;
import java.io.IOException; import java.io.IOException;
import java.net.InetAddress; import java.net.InetAddress;

View File

@ -25,7 +25,7 @@
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobId;

View File

@ -27,7 +27,7 @@
import java.util.EnumSet; import java.util.EnumSet;
import java.util.Collection; import java.util.Collection;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@ -134,8 +134,8 @@ protected void render(Block html) {
.append(getAttemptId(taskId, ta)).append("\",\"") .append(getAttemptId(taskId, ta)).append("\",\"")
.append(progress).append("\",\"") .append(progress).append("\",\"")
.append(ta.getState().toString()).append("\",\"") .append(ta.getState().toString()).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript( .append(StringEscapeUtils.escapeEcmaScript(
StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"") StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
.append(nodeHttpAddr == null ? "N/A" : .append(nodeHttpAddr == null ? "N/A" :
"<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>" "<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>"
@ -151,8 +151,8 @@ protected void render(Block html) {
.append(ta.getStartTime()).append("\",\"") .append(ta.getStartTime()).append("\",\"")
.append(ta.getFinishTime()).append("\",\"") .append(ta.getFinishTime()).append("\",\"")
.append(ta.getElapsedTime()).append("\",\"") .append(ta.getElapsedTime()).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml( .append(StringEscapeUtils.escapeEcmaScript(
diag))); StringEscapeUtils.escapeHtml4(diag)));
if (enableUIActions) { if (enableUIActions) {
attemptsTableData.append("\",\""); attemptsTableData.append("\",\"");
if (EnumSet.of( if (EnumSet.of(

View File

@ -24,7 +24,7 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR; import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE; import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
@ -103,8 +103,8 @@ public class TasksBlock extends HtmlBlock {
.append(join(pct, '%')).append("'> ").append("<div class='") .append(join(pct, '%')).append("'> ").append("<div class='")
.append(C_PROGRESSBAR_VALUE).append("' style='") .append(C_PROGRESSBAR_VALUE).append("' style='")
.append(join("width:", pct, '%')).append("'> </div> </div>\",\"") .append(join("width:", pct, '%')).append("'> </div> </div>\",\"")
.append(StringEscapeUtils.escapeJavaScript( .append(StringEscapeUtils.escapeEcmaScript(
StringEscapeUtils.escapeHtml(info.getStatus()))).append("\",\"") StringEscapeUtils.escapeHtml4(info.getStatus()))).append("\",\"")
.append(info.getState()).append("\",\"") .append(info.getState()).append("\",\"")
.append(info.getStartTime()).append("\",\"") .append(info.getStartTime()).append("\",\"")

View File

@ -22,7 +22,7 @@
import java.io.IOException; import java.io.IOException;
import java.util.Iterator; import java.util.Iterator;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL; import org.apache.hadoop.mapreduce.JobACL;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.mapreduce.checkpoint; package org.apache.hadoop.mapreduce.checkpoint;
import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang3.RandomStringUtils;
/** /**
* Simple naming service that generates a random checkpoint name. * Simple naming service that generates a random checkpoint name.

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.mapreduce.jobhistory; package org.apache.hadoop.mapreduce.jobhistory;
import org.apache.commons.lang.time.FastDateFormat; import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapred.JobStatus; import org.apache.hadoop.mapred.JobStatus;

View File

@ -23,7 +23,7 @@
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;

View File

@ -22,7 +22,7 @@
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;

View File

@ -31,7 +31,7 @@
import java.util.Arrays; import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -22,7 +22,7 @@
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;

View File

@ -21,7 +21,7 @@
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.Date; import java.util.Date;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@ -83,11 +83,11 @@ public class HsJobsBlock extends HtmlBlock {
.append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"") .append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
.append("<a href='").append(url("job", job.getId())).append("'>") .append("<a href='").append(url("job", job.getId())).append("'>")
.append(job.getId()).append("</a>\",\"") .append(job.getId()).append("</a>\",\"")
.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml( .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getName()))).append("\",\"") job.getName()))).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml( .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getUserName()))).append("\",\"") job.getUserName()))).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml( .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
job.getQueueName()))).append("\",\"") job.getQueueName()))).append("\",\"")
.append(job.getState()).append("\",\"") .append(job.getState()).append("\",\"")
.append(String.valueOf(job.getMapsTotal())).append("\",\"") .append(String.valueOf(job.getMapsTotal())).append("\",\"")

View File

@ -29,7 +29,7 @@
import java.util.Collection; import java.util.Collection;
import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@ -147,8 +147,8 @@ protected void render(Block html) {
attemptsTableData.append("[\"") attemptsTableData.append("[\"")
.append(getAttemptId(taskId, ta)).append("\",\"") .append(getAttemptId(taskId, ta)).append("\",\"")
.append(ta.getState()).append("\",\"") .append(ta.getState()).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript( .append(StringEscapeUtils.escapeEcmaScript(
StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"") StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
.append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>") .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
.append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"") .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
@ -171,8 +171,8 @@ protected void render(Block html) {
.append(elapsedReduceTime).append("\",\""); .append(elapsedReduceTime).append("\",\"");
} }
attemptsTableData.append(attemptElapsed).append("\",\"") attemptsTableData.append(attemptElapsed).append("\",\"")
.append(StringEscapeUtils.escapeJavaScript( .append(StringEscapeUtils.escapeEcmaScript(
StringEscapeUtils.escapeHtml(ta.getNote()))) StringEscapeUtils.escapeHtml4(ta.getNote())))
.append("\"],\n"); .append("\"],\n");
} }
//Remove the last comma and close off the array of arrays //Remove the last comma and close off the array of arrays

View File

@ -22,7 +22,7 @@
import java.security.PrivilegedAction; import java.security.PrivilegedAction;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobID;

View File

@ -28,7 +28,7 @@
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC;

View File

@ -23,7 +23,7 @@
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import org.apache.commons.lang.NotImplementedException; import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol; import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenResponse; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenResponse;
@ -167,7 +167,7 @@ public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
public GetTaskAttemptReportResponse getTaskAttemptReport( public GetTaskAttemptReportResponse getTaskAttemptReport(
GetTaskAttemptReportRequest request) throws IOException { GetTaskAttemptReportRequest request) throws IOException {
//not invoked by anybody //not invoked by anybody
throw new NotImplementedException(); throw new NotImplementedException("Code is not implemented");
} }
@Override @Override
@ -222,26 +222,26 @@ public KillTaskAttemptResponse killTaskAttempt(
public GetDelegationTokenResponse getDelegationToken( public GetDelegationTokenResponse getDelegationToken(
GetDelegationTokenRequest request) throws IOException { GetDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */ /* Should not be invoked by anyone. */
throw new NotImplementedException(); throw new NotImplementedException("Code is not implemented");
} }
@Override @Override
public RenewDelegationTokenResponse renewDelegationToken( public RenewDelegationTokenResponse renewDelegationToken(
RenewDelegationTokenRequest request) throws IOException { RenewDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */ /* Should not be invoked by anyone. */
throw new NotImplementedException(); throw new NotImplementedException("Code is not implemented");
} }
@Override @Override
public CancelDelegationTokenResponse cancelDelegationToken( public CancelDelegationTokenResponse cancelDelegationToken(
CancelDelegationTokenRequest request) throws IOException { CancelDelegationTokenRequest request) throws IOException {
/* Should not be invoked by anyone. */ /* Should not be invoked by anyone. */
throw new NotImplementedException(); throw new NotImplementedException("Code is not implemented");
} }
@Override @Override
public InetSocketAddress getConnectAddress() { public InetSocketAddress getConnectAddress() {
/* Should not be invoked by anyone. Normally used to set token service */ /* Should not be invoked by anyone. Normally used to set token service */
throw new NotImplementedException(); throw new NotImplementedException("Code is not implemented");
} }
} }

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapred; package org.apache.hadoop.mapred;
import static org.apache.commons.lang.StringUtils.isEmpty; import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.hadoop.mapreduce.MRJobConfig.MR_AM_RESOURCE_PREFIX; import static org.apache.hadoop.mapreduce.MRJobConfig.MR_AM_RESOURCE_PREFIX;
import java.io.IOException; import java.io.IOException;

View File

@ -29,7 +29,7 @@
import java.util.List; import java.util.List;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;

View File

@ -20,7 +20,7 @@
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;

View File

@ -142,11 +142,6 @@
<artifactId>commons-cli</artifactId> <artifactId>commons-cli</artifactId>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<scope>provided</scope>
</dependency>
<dependency> <dependency>
<groupId>commons-collections</groupId> <groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId> <artifactId>commons-collections</artifactId>