HADOOP-15531. Use commons-text instead of commons-lang in some classes to fix deprecation warnings. Contributed by Takanobu Asanuma.

Akira Ajisaka 2018-07-13 11:42:12 -04:00
parent 17118f446c
commit 88625f5cd9
30 changed files with 52 additions and 37 deletions
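
Background: commons-lang3 3.6 deprecated its text-processing classes (StringEscapeUtils, StrBuilder, WordUtils) in favor of Apache Commons Text, so most of this patch is a mechanical package swap. A minimal sketch of the pattern (not taken from the patch; the demo class is hypothetical), assuming commons-text 1.4 on the classpath:

// Before (deprecated since commons-lang3 3.6):
//   import org.apache.commons.lang3.StringEscapeUtils;
// After: same class name, new package, same method names and behavior.
import org.apache.commons.text.StringEscapeUtils;

public class EscapeDemo {
  public static void main(String[] args) {
    System.out.println(StringEscapeUtils.escapeHtml4("<b>5 < 6 & 7 > 6</b>"));
    System.out.println(StringEscapeUtils.escapeJava("line1\nline2"));
  }
}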

View File

@@ -170,6 +170,10 @@
     <groupId>org.apache.commons</groupId>
     <artifactId>commons-lang3</artifactId>
   </exclusion>
+  <exclusion>
+    <groupId>org.apache.commons</groupId>
+    <artifactId>commons-text</artifactId>
+  </exclusion>
   <exclusion>
     <groupId>commons-logging</groupId>
     <artifactId>commons-logging</artifactId>

View File

@@ -171,6 +171,11 @@
     <artifactId>commons-lang3</artifactId>
     <scope>compile</scope>
   </dependency>
+  <dependency>
+    <groupId>org.apache.commons</groupId>
+    <artifactId>commons-text</artifactId>
+    <scope>compile</scope>
+  </dependency>
   <dependency>
     <groupId>org.slf4j</groupId>
     <artifactId>slf4j-api</artifactId>

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.conf;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import java.util.Collection;
 import java.util.Enumeration;

View File

@@ -31,7 +31,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;

View File

@@ -27,7 +27,7 @@
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -491,7 +491,7 @@ protected int getDefaultTop() {
   /**
    * Put output line to log and string buffer.
    * */
-  protected void recordOutput(final StrBuilder result,
+  protected void recordOutput(final TextStringBuilder result,
       final String outputLine) {
     LOG.info(outputLine);
     result.appendln(outputLine);
@@ -501,7 +501,7 @@ protected void recordOutput(final StrBuilder result,
   /**
    * Parse top number of nodes to be processed.
    * @return top number of nodes to be processed.
    */
-  protected int parseTopNodes(final CommandLine cmd, final StrBuilder result)
+  protected int parseTopNodes(final CommandLine cmd, final TextStringBuilder result)
       throws IllegalArgumentException {
     String outputLine = "";
     int nodes = 0;
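
The StrBuilder replacement is equally mechanical: commons-text's TextStringBuilder mirrors the deprecated class's API, including the appendln() used by recordOutput() above. A small sketch (hypothetical demo class, not from the patch), assuming commons-text 1.4:

import org.apache.commons.text.TextStringBuilder;

public class BuilderDemo {
  public static void main(String[] args) {
    TextStringBuilder result = new TextStringBuilder();
    // appendln() appends the text plus a trailing newline, exactly as
    // StrBuilder did, so callers need no change beyond the type swap.
    result.appendln("Processing report command");
    result.appendln("No top limit specified, using default.");
    System.out.print(result.toString());
  }
}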

View File

@@ -23,7 +23,7 @@
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -89,7 +89,7 @@ public PlanCommand(Configuration conf, final PrintStream ps) {
    */
   @Override
   public void execute(CommandLine cmd) throws Exception {
-    StrBuilder result = new StrBuilder();
+    TextStringBuilder result = new TextStringBuilder();
     String outputLine = "";
     LOG.debug("Processing Plan Command.");
     Preconditions.checkState(cmd.hasOption(DiskBalancerCLI.PLAN));

View File

@@ -25,7 +25,7 @@
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
@@ -67,7 +67,7 @@ public ReportCommand(Configuration conf, final PrintStream ps) {
   @Override
   public void execute(CommandLine cmd) throws Exception {
-    StrBuilder result = new StrBuilder();
+    TextStringBuilder result = new TextStringBuilder();
     String outputLine = "Processing report command";
     recordOutput(result, outputLine);
@@ -99,7 +99,7 @@ public void execute(CommandLine cmd) throws Exception {
     getPrintStream().println(result.toString());
   }
-  private void handleTopReport(final CommandLine cmd, final StrBuilder result,
+  private void handleTopReport(final CommandLine cmd, final TextStringBuilder result,
       final String nodeFormat) throws IllegalArgumentException {
     Collections.sort(getCluster().getNodes(), Collections.reverseOrder());
@@ -131,7 +131,7 @@ private void handleTopReport(final CommandLine cmd, final StrBuilder result,
     }
   }
-  private void handleNodeReport(final CommandLine cmd, StrBuilder result,
+  private void handleNodeReport(final CommandLine cmd, TextStringBuilder result,
       final String nodeFormat, final String volumeFormat) throws Exception {
     String outputLine = "";
     /*
@@ -175,7 +175,7 @@ private void handleNodeReport(final CommandLine cmd, StrBuilder result,
   /**
    * Put node report lines to string buffer.
    */
-  private void recordNodeReport(StrBuilder result, DiskBalancerDataNode dbdn,
+  private void recordNodeReport(TextStringBuilder result, DiskBalancerDataNode dbdn,
       final String nodeFormat, final String volumeFormat) throws Exception {
     final String trueStr = "True";
     final String falseStr = "False";

View File

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
-import static org.apache.commons.lang3.StringEscapeUtils.escapeJava;
+import static org.apache.commons.text.StringEscapeUtils.escapeJava;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT;

View File

@@ -22,7 +22,7 @@
 import java.util.LinkedList;
 import java.util.List;
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
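
WordUtils likewise moved wholesale from org.apache.commons.lang3.text to org.apache.commons.text with the same static methods. A minimal sketch (hypothetical demo class and help text, not from this file), assuming commons-text 1.4:

import org.apache.commons.text.WordUtils;

public class WrapDemo {
  public static void main(String[] args) {
    String help = "Prints a summary of the current cluster state.";
    // wrap() behaves exactly as in the deprecated commons-lang3 version:
    // breaks the text into lines no longer than the given width.
    System.out.println(WordUtils.wrap(help, 20));
  }
}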

View File

@@ -38,7 +38,7 @@
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -661,7 +661,7 @@ public void testDecommissionWithOpenfile()
   }
   private static String scanIntoString(final ByteArrayOutputStream baos) {
-    final StrBuilder sb = new StrBuilder();
+    final TextStringBuilder sb = new TextStringBuilder();
     final Scanner scanner = new Scanner(baos.toString());
     while (scanner.hasNextLine()) {
       sb.appendln(scanner.nextLine());

View File

@@ -27,7 +27,7 @@
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -518,7 +518,7 @@ public void testNameNodeGetReconfigurationStatus() throws IOException,
   }
   private static String scanIntoString(final ByteArrayOutputStream baos) {
-    final StrBuilder sb = new StrBuilder();
+    final TextStringBuilder sb = new TextStringBuilder();
     final Scanner scanner = new Scanner(baos.toString());
     while (scanner.hasNextLine()) {
       sb.appendln(scanner.nextLine());

View File

@@ -27,7 +27,7 @@
 import java.util.EnumSet;
 import java.util.Collection;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;

View File

@@ -24,7 +24,7 @@
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;

View File

@@ -21,7 +21,7 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;

View File

@@ -29,7 +29,7 @@
 import java.util.Collection;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

View File

@@ -1069,6 +1069,11 @@
       <artifactId>commons-lang3</artifactId>
       <version>3.7</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+      <version>1.4</version>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>

View File

@@ -867,7 +867,8 @@ String getHeader(QueueMetrics queueMetrics, NodesInformation nodes) {
         TimeUnit.MILLISECONDS.toMinutes(uptime)
             - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(uptime));
     String uptimeStr = String.format("%dd, %d:%d", days, hours, minutes);
-    String currentTime = DateFormatUtils.ISO_TIME_NO_T_FORMAT.format(now);
+    String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT
+        .format(now);
     ret.append(CLEAR_LINE);
     ret.append(limitLineLength(String.format(
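
This hunk is a rename rather than a package move: commons-lang3 3.6 deprecated DateFormatUtils.ISO_TIME_NO_T_FORMAT and introduced ISO_8601_EXTENDED_TIME_FORMAT with the same "HH:mm:ss" pattern, still in commons-lang3 itself. A minimal sketch (hypothetical demo class):

import org.apache.commons.lang3.time.DateFormatUtils;

public class TimeDemo {
  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    // Same "HH:mm:ss" output as the deprecated ISO_TIME_NO_T_FORMAT.
    System.out.println(
        DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT.format(now));
  }
}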

View File

@@ -26,7 +26,7 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 @Private

View File

@@ -28,7 +28,7 @@
 import static java.util.EnumSet.*;
 import java.util.Iterator;
-import static org.apache.commons.lang3.StringEscapeUtils.*;
+import static org.apache.commons.text.StringEscapeUtils.*;
 import static org.apache.hadoop.yarn.webapp.hamlet.HamletImpl.EOpt.*;
 import org.apache.hadoop.classification.InterfaceAudience;

View File

@@ -28,7 +28,7 @@
 import static java.util.EnumSet.*;
 import java.util.Iterator;
-import static org.apache.commons.lang3.StringEscapeUtils.*;
+import static org.apache.commons.text.StringEscapeUtils.*;
 import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.EOpt.*;
 import org.apache.hadoop.classification.InterfaceAudience;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.yarn.webapp.view;
-import static org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript;
+import static org.apache.commons.text.StringEscapeUtils.escapeEcmaScript;
 import static org.apache.hadoop.yarn.util.StringHelper.djoin;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.util.StringHelper.split;

View File

@@ -20,7 +20,7 @@
 import java.io.PrintWriter;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.yarn.webapp.View;

View File

@@ -25,7 +25,7 @@
 import java.util.Collection;
 import java.util.List;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;

View File

@@ -28,7 +28,7 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;

View File

@@ -32,7 +32,7 @@
 import java.util.EnumSet;
 import java.util.List;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.Range;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;

View File

@@ -29,7 +29,7 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;

View File

@@ -27,7 +27,7 @@
 import java.util.Collection;
 import java.util.List;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;

View File

@@ -25,7 +25,7 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;

View File

@@ -26,7 +26,7 @@
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.yarn.server.router.webapp;
-import static org.apache.commons.lang3.StringEscapeUtils.escapeHtml4;
-import static org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript;
+import static org.apache.commons.text.StringEscapeUtils.escapeHtml4;
+import static org.apache.commons.text.StringEscapeUtils.escapeEcmaScript;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
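
For statically imported escape helpers like these, only the import lines change; call sites such as escapeHtml4(...) and escapeEcmaScript(...) stay untouched. A sketch with hypothetical values, assuming commons-text 1.4:

import static org.apache.commons.text.StringEscapeUtils.escapeEcmaScript;
import static org.apache.commons.text.StringEscapeUtils.escapeHtml4;

public class StaticImportDemo {
  public static void main(String[] args) {
    String appName = "my \"quoted\" <app>";
    // Escape once for HTML, then again for embedding in inline JavaScript.
    System.out.println(escapeEcmaScript(escapeHtml4(appName)));
  }
}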