() {
- @Override
- public Long apply(RemoteEditLog log) {
+ log -> {
if (null == log) {
return HdfsServerConstants.INVALID_TXID;
}
return log.getStartTxId();
- }
- };
+ };
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
index 261bf8cf6a..aa7f4fb069 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
@@ -37,9 +37,7 @@
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.LongAccumulator;
-import com.google.common.base.Function;
import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -304,15 +302,11 @@ public static void setFailoverConfigurations(Configuration conf, String logicalN
public static <P extends FailoverProxyProvider<?>> void
setFailoverConfigurations(Configuration conf, String logicalName,
List<InetSocketAddress> nnAddresses, Class<P> classFPP) {
- setFailoverConfigurations(conf, logicalName,
- Iterables.transform(nnAddresses, new Function<InetSocketAddress, String>() {
-
- // transform the inet address to a simple string
- @Override
- public String apply(InetSocketAddress addr) {
- return "hdfs://" + addr.getHostName() + ":" + addr.getPort();
- }
- }), classFPP);
+ final List<String> addresses = new ArrayList<>();
+ nnAddresses.forEach(
+ addr -> addresses.add(
+ "hdfs://" + addr.getHostName() + ":" + addr.getPort()));
+ setFailoverConfigurations(conf, logicalName, addresses, classFPP);
}
public static <P extends FailoverProxyProvider<?>>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
index 3897a9b2b3..104aacc5b7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
@@ -23,8 +23,7 @@
import java.util.Collection;
import java.util.List;
import java.util.Set;
-
-import javax.annotation.Nullable;
+import java.util.stream.Collectors;
import org.junit.Assert;
@@ -49,8 +48,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
@@ -353,13 +350,10 @@ public static void verifyFileStatuses(List<Path> expectedPaths,
List<FileStatus> fetchedStatuses, final FileSystem localFs) {
Assert.assertEquals(expectedPaths.size(), fetchedStatuses.size());
- Iterable<Path> fqExpectedPaths = Iterables.transform(expectedPaths,
- new Function<Path, Path>() {
- @Override
- public Path apply(Path input) {
- return localFs.makeQualified(input);
- }
- });
+ Iterable<Path> fqExpectedPaths =
+ expectedPaths.stream().map(
+ input -> localFs.makeQualified(input)).collect(Collectors.toList());
+
Set<Path> expectedPathSet = Sets.newHashSet(fqExpectedPaths);
for (FileStatus fileStatus : fetchedStatuses) {
@@ -374,13 +368,10 @@ public Path apply(Path input) {
private void verifySplits(List<String> expected, List<InputSplit> splits) {
- Iterable<String> pathsFromSplits = Iterables.transform(splits,
- new Function<InputSplit, String>() {
- @Override
- public String apply(@Nullable InputSplit input) {
- return ((FileSplit) input).getPath().toString();
- }
- });
+ Iterable<String> pathsFromSplits =
+ splits.stream().map(
+ input-> ((FileSplit) input).getPath().toString())
+ .collect(Collectors.toList());
Set<String> expectedSet = Sets.newHashSet(expected);
for (String splitPathString : pathsFromSplits) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index 73d9287aa7..32b576aeec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -22,7 +22,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-
import org.apache.commons.lang3.Range;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -35,8 +34,6 @@
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder;
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
import org.apache.hadoop.thirdparty.protobuf.TextFormat;
@Private
@@ -88,13 +85,8 @@ private void mergeLocalToBuilder() {
}
if (applicationStates != null && !applicationStates.isEmpty()) {
builder.clearApplicationStates();
- builder.addAllApplicationStates(Iterables.transform(applicationStates,
- new Function<YarnApplicationState, YarnApplicationStateProto>() {
- @Override
- public YarnApplicationStateProto apply(YarnApplicationState input) {
- return ProtoUtils.convertToProtoFormat(input);
- }
- }));
+ applicationStates.forEach(input ->
+ builder.addApplicationStates(ProtoUtils.convertToProtoFormat(input)));
}
if (applicationTags != null && !applicationTags.isEmpty()) {
builder.clearApplicationTags();