From 98fcffe93f9ef910654574f69591fcdc621523af Mon Sep 17 00:00:00 2001
From: Ahmed Hussein
Date: Wed, 15 Jul 2020 09:53:18 -0500
Subject: [PATCH] HADOOP-17101. Replace Guava Function with Java8+ Function

Signed-off-by: Jonathan Eagles
---
 .../main/resources/checkstyle/checkstyle.xml  |  2 +-
 .../hdfs/server/blockmanagement/HostSet.java  | 25 +++++++++--------
 .../hdfs/server/namenode/JournalSet.java      | 22 +++++++--------
 .../hdfs/server/protocol/RemoteEditLog.java   | 11 +++-----
 .../hdfs/server/namenode/ha/HATestUtil.java   | 16 ++++-------
 .../lib/input/TestFileInputFormat.java        | 27 +++++++------------
 .../impl/pb/GetApplicationsRequestPBImpl.java | 12 ++-------
 7 files changed, 44 insertions(+), 71 deletions(-)

diff --git a/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml b/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
index 54a5943738..e0a55f70a6 100644
--- a/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
+++ b/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
@@ -123,7 +123,7 @@
+         value="^com\.google\.common\.base\.(Optional|Function), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
index cf7cfac95a..1d16caef86 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Iterators;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.UnmodifiableIterator;
 
-import javax.annotation.Nullable;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.Collection;
@@ -101,14 +98,16 @@ public InetSocketAddress next() {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder("HostSet(");
-    Joiner.on(",").appendTo(sb, Iterators.transform(iterator(),
-        new Function<InetSocketAddress, String>() {
-          @Override
-          public String apply(@Nullable InetSocketAddress addr) {
-            assert addr != null;
-            return addr.getAddress().getHostAddress() + ":" + addr.getPort();
-          }
-        }));
-    return sb.append(")").toString();
+    Iterator<InetSocketAddress> iter = iterator();
+    String sep = "";
+    while (iter.hasNext()) {
+      InetSocketAddress addr = iter.next();
+      sb.append(sep);
+      sb.append(addr.getAddress().getHostAddress());
+      sb.append(':');
+      sb.append(addr.getPort());
+      sep = ",";
+    }
+    return sb.append(')').toString();
   }
 }
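
The hand-rolled separator loop above replaces Guava's Joiner.on(",") plus Iterators.transform, which is what lets the Function, Joiner, Iterators, and @Nullable imports go. For reference, the JDK's java.util.StringJoiner (Java 8+) expresses the same prefix/separator/suffix pattern without the separator bookkeeping. A minimal standalone sketch, with a hypothetical class name and sample addresses that are not part of the patch:

    import java.net.InetSocketAddress;
    import java.util.Arrays;
    import java.util.List;
    import java.util.StringJoiner;

    public class HostSetJoinSketch {
      // Produces the same "HostSet(ip:port,ip:port)" shape as the new toString().
      static String format(List<InetSocketAddress> addrs) {
        StringJoiner joiner = new StringJoiner(",", "HostSet(", ")");
        for (InetSocketAddress addr : addrs) {
          joiner.add(addr.getAddress().getHostAddress() + ":" + addr.getPort());
        }
        return joiner.toString();
      }

      public static void main(String[] args) {
        List<InetSocketAddress> addrs = Arrays.asList(
            new InetSocketAddress("127.0.0.1", 8020),
            new InetSocketAddress("127.0.0.2", 8020));
        System.out.println(format(addrs)); // HostSet(127.0.0.1:8020,127.0.0.2:8020)
      }
    }

The patch keeps the explicit loop, which pulls no new type into HostSet; StringJoiner is merely an equivalent JDK option.
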
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
index a5df9f53b9..eb3a140ba4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
@@ -24,8 +24,10 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.PriorityQueue;
 import java.util.SortedSet;
 import java.util.concurrent.CopyOnWriteArrayList;
@@ -38,13 +40,9 @@
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
-
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Multimaps;
 import com.google.common.collect.Sets;
 
 /**
@@ -634,7 +632,7 @@ public void apply(JournalAndStream jas) throws IOException {
    */
   public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
     // Collect RemoteEditLogs available from each FileJournalManager
-    List<RemoteEditLog> allLogs = Lists.newArrayList();
+    List<RemoteEditLog> allLogs = new ArrayList<>();
     for (JournalAndStream j : journals) {
       if (j.getManager() instanceof FileJournalManager) {
         FileJournalManager fjm = (FileJournalManager)j.getManager();
@@ -645,15 +643,17 @@ public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
         }
       }
     }
 
-    // Group logs by their starting txid
-    ImmutableListMultimap<Long, RemoteEditLog> logsByStartTxId =
-        Multimaps.index(allLogs, RemoteEditLog.GET_START_TXID);
+    final Map<Long, List<RemoteEditLog>> logsByStartTxId = new HashMap<>();
+    allLogs.forEach(input -> {
+      long key = RemoteEditLog.GET_START_TXID.apply(input);
+      logsByStartTxId.computeIfAbsent(key, k-> new ArrayList<>()).add(input);
+    });
     long curStartTxId = fromTxId;
-
-    List<RemoteEditLog> logs = Lists.newArrayList();
+    List<RemoteEditLog> logs = new ArrayList<>();
     while (true) {
-      ImmutableList<RemoteEditLog> logGroup = logsByStartTxId.get(curStartTxId);
+      List<RemoteEditLog> logGroup =
+          logsByStartTxId.getOrDefault(curStartTxId, Collections.emptyList());
       if (logGroup.isEmpty()) {
         // we have a gap in logs - for example because we recovered some old
        // storage directory with ancient logs. Clear out any logs we've
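
Multimaps.index(allLogs, GET_START_TXID) produced an ImmutableListMultimap keyed by start txid; the replacement groups into a HashMap of ArrayLists via computeIfAbsent. The getOrDefault call compensates for a behavioral difference: a Multimap returns an empty collection for an absent key, while Map.get would return null. The same grouping can also be written with Collectors.groupingBy; a minimal sketch, using a hypothetical Segment class as a stand-in for RemoteEditLog:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class GroupByStartTxIdSketch {
      // Stand-in for RemoteEditLog: a segment identified by its start txid.
      static class Segment {
        final long startTxId;
        Segment(long startTxId) { this.startTxId = startTxId; }
      }

      public static void main(String[] args) {
        List<Segment> logs = Arrays.asList(
            new Segment(1), new Segment(1), new Segment(100));
        // Same grouping as Multimaps.index(allLogs, GET_START_TXID):
        // start txid -> every segment that starts there, in encounter order.
        Map<Long, List<Segment>> byStart = logs.stream()
            .collect(Collectors.groupingBy(s -> s.startTxId));
        System.out.println(byStart.get(1L).size());   // 2
        System.out.println(byStart.get(100L).size()); // 1
        System.out.println(byStart.get(7L));          // null, unlike a Multimap
      }
    }

The patch's explicit forEach/computeIfAbsent form reuses the existing GET_START_TXID function directly and avoids introducing a stream pipeline into this method; groupingBy is simply the more compact equivalent.
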
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
index 1d26bc4983..427daf1329 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.protocol;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ComparisonChain;
+import java.util.function.Function;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 
 public class RemoteEditLog implements Comparable<RemoteEditLog> {
@@ -82,16 +82,13 @@ public int hashCode() {
   }
 
   /**
-   * Guava Function which applies {@link #getStartTxId()}
+   * Java Function which applies {@link #getStartTxId()}
    */
   public static final Function<RemoteEditLog, Long> GET_START_TXID =
-      new Function<RemoteEditLog, Long>() {
-        @Override
-        public Long apply(RemoteEditLog log) {
+      log -> {
         if (null == log) {
           return HdfsServerConstants.INVALID_TXID;
         }
         return log.getStartTxId();
-        }
-      };
+      };
 }
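
Only the import and the anonymous class change here: the field keeps its Function<RemoteEditLog, Long> shape, so call sites such as JournalSet's GET_START_TXID.apply(input) compile unchanged. A standalone sketch of that call-site shape, with illustrative names and a stand-in constant for HdfsServerConstants.INVALID_TXID:

    import java.util.function.Function;

    public class FunctionMigrationSketch {
      // Stand-in for HdfsServerConstants.INVALID_TXID.
      static final long INVALID_TXID = -12345;

      public static void main(String[] args) {
        // Call sites keep the same shape as with the Guava type: fn.apply(x).
        // The lambda replaces the anonymous new Function<...>() { ... } block,
        // null guard included, just as GET_START_TXID does in the patch.
        Function<String, Long> startTxId = s -> {
          if (null == s) {
            return INVALID_TXID;
          }
          return Long.parseLong(s);
        };
        System.out.println(startTxId.apply("42"));  // 42
        System.out.println(startTxId.apply(null));  // -12345
        // The JDK interface also composes directly:
        Function<String, String> labeled = startTxId.andThen(v -> "txid=" + v);
        System.out.println(labeled.apply("7"));     // txid=7
      }
    }
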

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
index 261bf8cf6a..aa7f4fb069 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
@@ -37,9 +37,7 @@
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.LongAccumulator;
 
-import com.google.common.base.Function;
 import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -304,15 +302,11 @@ public static void setFailoverConfigurations(Configuration conf, String logicalN
   public static <P extends FailoverProxyProvider<ClientProtocol>>
       void setFailoverConfigurations(Configuration conf, String logicalName,
      List<InetSocketAddress> nnAddresses, Class<P> classFPP) {
-    setFailoverConfigurations(conf, logicalName,
-        Iterables.transform(nnAddresses, new Function<InetSocketAddress, String>() {
-
-          // transform the inet address to a simple string
-          @Override
-          public String apply(InetSocketAddress addr) {
-            return "hdfs://" + addr.getHostName() + ":" + addr.getPort();
-          }
-        }), classFPP);
+    final List<String> addresses = new ArrayList<String>();
+    nnAddresses.forEach(
+        addr -> addresses.add(
+            "hdfs://" + addr.getHostName() + ":" + addr.getPort()));
+    setFailoverConfigurations(conf, logicalName, addresses, classFPP);
   }
 
   public static <P extends FailoverProxyProvider<ClientProtocol>>
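
The lazy Iterables.transform view becomes an eagerly built list of "hdfs://host:port" strings handed to the existing overload. The same result can be written as a single stream expression; a minimal sketch with made-up NameNode host names:

    import java.net.InetSocketAddress;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class AddressListSketch {
      public static void main(String[] args) {
        // createUnresolved avoids any DNS lookup for these made-up hosts.
        List<InetSocketAddress> nnAddresses = Arrays.asList(
            InetSocketAddress.createUnresolved("nn1.example.com", 8020),
            InetSocketAddress.createUnresolved("nn2.example.com", 8020));
        // Same result as the patch's forEach-into-a-list, as one expression.
        List<String> addresses = nnAddresses.stream()
            .map(addr -> "hdfs://" + addr.getHostName() + ":" + addr.getPort())
            .collect(Collectors.toList());
        System.out.println(addresses);
        // -> [hdfs://nn1.example.com:8020, hdfs://nn2.example.com:8020]
      }
    }
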

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
index ca30bf3475..da57e6ffda 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
@@ -23,8 +23,7 @@
 import java.util.Collection;
 import java.util.List;
 import java.util.Set;
-
-import javax.annotation.Nullable;
+import java.util.stream.Collectors;
 
 import org.junit.Assert;
 
@@ -55,8 +54,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
@@ -403,13 +400,10 @@ public static void verifyFileStatuses(List<Path> expectedPaths,
       List<FileStatus> fetchedStatuses, final FileSystem localFs) {
     Assert.assertEquals(expectedPaths.size(), fetchedStatuses.size());
 
-    Iterable<Path> fqExpectedPaths = Iterables.transform(expectedPaths,
-        new Function<Path, Path>() {
-          @Override
-          public Path apply(Path input) {
-            return localFs.makeQualified(input);
-          }
-        });
+    Iterable<Path> fqExpectedPaths =
+        expectedPaths.stream().map(
+            input -> localFs.makeQualified(input)).collect(Collectors.toList());
+
     Set<Path> expectedPathSet = Sets.newHashSet(fqExpectedPaths);
     for (FileStatus fileStatus : fetchedStatuses) {
@@ -424,13 +418,10 @@ public Path apply(Path input) {
 
   private void verifySplits(List<String> expected, List<InputSplit> splits) {
-    Iterable<String> pathsFromSplits = Iterables.transform(splits,
-        new Function<InputSplit, String>() {
-          @Override
-          public String apply(@Nullable InputSplit input) {
-            return ((FileSplit) input).getPath().toString();
-          }
-        });
+    Iterable<String> pathsFromSplits =
+        splits.stream().map(
+            input-> ((FileSplit) input).getPath().toString())
+            .collect(Collectors.toList());
 
     Set<String> expectedSet = Sets.newHashSet(expected);
     for (String splitPathString : pathsFromSplits) {
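
Both helpers now materialize the mapped values with Collectors.toList(); verifyFileStatuses then copies them into a Set with Sets.newHashSet. Because each Iterable is consumed exactly once, collecting straight to a Set would drop the intermediate list. That is a possible further simplification, not something this patch does; a sketch with placeholder paths:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Set;
    import java.util.stream.Collectors;

    public class TransformToSetSketch {
      public static void main(String[] args) {
        List<String> paths = Arrays.asList("/a", "/b", "/a");
        Set<String> qualified = paths.stream()
            .map(p -> "file:" + p)  // stand-in for localFs.makeQualified(input)
            .collect(Collectors.toSet());
        System.out.println(qualified.size()); // 2 -- duplicates collapse in the Set
      }
    }
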
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index 73d9287aa7..32b576aeec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -22,7 +22,6 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-
 import org.apache.commons.lang3.Range;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -35,8 +34,6 @@
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import org.apache.hadoop.thirdparty.protobuf.TextFormat;
 
 @Private
@@ -88,13 +85,8 @@ private void mergeLocalToBuilder() {
     }
     if (applicationStates != null && !applicationStates.isEmpty()) {
       builder.clearApplicationStates();
-      builder.addAllApplicationStates(Iterables.transform(applicationStates,
-          new Function<YarnApplicationState, YarnApplicationStateProto>() {
-            @Override
-            public YarnApplicationStateProto apply(YarnApplicationState input) {
-              return ProtoUtils.convertToProtoFormat(input);
-            }
-          }));
+      applicationStates.forEach(input ->
+          builder.addApplicationStates(ProtoUtils.convertToProtoFormat(input)));
     }
     if (applicationTags != null && !applicationTags.isEmpty()) {
       builder.clearApplicationTags();
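
The addAllApplicationStates call over a lazily transformed Iterable becomes a forEach that converts each state and appends it through the repeated-field adder. A generic sketch of that builder pattern, with a hypothetical Builder standing in for the protobuf-generated one (YarnApplicationStateProto and ProtoUtils themselves are untouched by this patch):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Function;

    public class BuilderForEachSketch {
      // Hypothetical stand-in for a protobuf Builder: one addX(converted) call
      // per element replaces a single addAllX(Iterables.transform(...)).
      static class Builder {
        private final List<String> states = new ArrayList<>();
        Builder addState(String s) { states.add(s); return this; }
        List<String> build() { return states; }
      }

      public static void main(String[] args) {
        List<Integer> applicationStates = Arrays.asList(1, 2, 3);
        Function<Integer, String> convert = i -> "STATE_" + i; // stand-in converter
        Builder builder = new Builder();
        applicationStates.forEach(input -> builder.addState(convert.apply(input)));
        System.out.println(builder.build()); // [STATE_1, STATE_2, STATE_3]
      }
    }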