diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index d55c80b1a7..4505aa90cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -67,7 +67,7 @@ public static class GetfattrCommand extends FsCommand {
       "0x and 0s, respectively.\n" +
       "<path>: The file or directory.\n";
     private final static Function<String, XAttrCodec> enValueOfFunc =
-        Enums.valueOfFunction(XAttrCodec.class);
+        Enums.stringConverter(XAttrCodec.class);
 
     private String name = null;
     private boolean dump = false;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index 0ed91582c9..055bcaa582 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -55,7 +55,6 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 @InterfaceAudience.LimitedPrivate("HDFS")
@@ -511,7 +510,7 @@ private synchronized void fenceOldActive(byte[] data) {
       doFence(target);
     } catch (Throwable t) {
       recordActiveAttempt(new ActiveAttemptRecord(false, "Unable to fence old active: " +
           StringUtils.stringifyException(t)));
-      Throwables.propagate(t);
+      throw t;
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
index 6a11b875cd..0605156a86 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/AbstractMetric.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2;
 
+import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
 import static com.google.common.base.Preconditions.*;
 
@@ -84,7 +85,7 @@ protected MetricsInfo info() {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("info", info)
         .add("value", value())
         .toString();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
index e9e50a4acb..68b07379c5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsTag.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2;
 
+import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
 import static com.google.common.base.Preconditions.*;
 
@@ -80,7 +81,7 @@ public String value() {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("info", info)
         .add("value", value())
         .toString();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
index 3684c7ef5c..fec29c2b4c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/AbstractMetricsRecord.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.impl;
 
+import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
 import com.google.common.collect.Iterables;
 
@@ -43,7 +44,7 @@ abstract class AbstractMetricsRecord implements MetricsRecord {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("timestamp", timestamp())
         .add("name", name())
         .add("description", description())
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
index 782f755720..5de7edcb0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MsInfo.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.metrics2.impl;
 
-import com.google.common.base.Objects;
+import com.google.common.base.MoreObjects;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
@@ -48,7 +48,7 @@ public enum MsInfo implements MetricsInfo {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
index dfb6c357a2..054f21147c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsInfoImpl.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.lib;
 
+import com.google.common.base.MoreObjects;
 import com.google.common.base.Objects;
 import static com.google.common.base.Preconditions.*;
 import org.apache.hadoop.metrics2.MetricsInfo;
@@ -55,7 +56,7 @@ class MetricsInfoImpl implements MetricsInfo {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name).add("description", description)
         .toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
index 1ef74f41c0..0af45a6942 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 
 import com.google.common.collect.Maps;
-import com.google.common.base.Objects;
+import com.google.common.base.MoreObjects;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -416,7 +416,7 @@ public synchronized void snapshot(MetricsRecordBuilder builder, boolean all) {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("info", metricsInfo).add("tags", tags()).add("metrics", metrics())
         .toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
index 55bb41720e..59a79fd4d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.metrics2.source;
 
-import com.google.common.base.Objects;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsInfo;
 
+import com.google.common.base.MoreObjects;
+
 /**
  * JVM and logging related metrics info instances
  */
@@ -60,7 +60,7 @@ public enum JvmMetricsInfo implements MetricsInfo {
   @Override public String description() { return desc; }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("name", name()).add("description", desc)
         .toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
index efcb286fae..753e307cef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java
@@ -31,7 +31,7 @@
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 
-import com.google.common.base.Objects;
+import com.google.common.base.MoreObjects;
 import com.google.common.collect.Maps;
 
 /**
@@ -127,7 +127,7 @@ public Set<Map.Entry<String, AbstractMetric>> metricsEntrySet() {
   }
 
   @Override public String toString() {
-    return Objects.toStringHelper(this)
+    return MoreObjects.toStringHelper(this)
         .add("tags", tags).add("metrics", metrics)
         .toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ChildReaper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ChildReaper.java
index 3bff187a28..86142fb6d3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ChildReaper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ChildReaper.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.util.curator;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
 import org.apache.curator.framework.recipes.locks.Reaper;
 import org.apache.curator.utils.CloseableUtils;
 import org.apache.curator.framework.CuratorFramework;
@@ -34,6 +33,7 @@
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -82,7 +82,7 @@ public class ChildReaper implements Closeable
    * @since 15.0
    */
   public static Set newConcurrentHashSet() {
-    return Sets.newSetFromMap(new ConcurrentHashMap());
+    return Collections.newSetFromMap(new ConcurrentHashMap());
  }
 
  private enum State
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
index 45bea5ef5f..bcdc1c8285 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInotifyEventInputStream.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hdfs;
 
-import com.google.common.collect.Iterators;
+import java.util.Collections;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.inotify.EventBatch;
@@ -72,7 +72,7 @@ public class DFSInotifyEventInputStream {
   DFSInotifyEventInputStream(ClientProtocol namenode, Tracer tracer,
       long lastReadTxid) {
     this.namenode = namenode;
-    this.it = Iterators.emptyIterator();
+    this.it = Collections.emptyIterator();
     this.lastReadTxid = lastReadTxid;
     this.tracer = tracer;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
index a5324be841..57ef1bcec9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
@@ -1109,7 +1109,7 @@ public Void call() throws IOException {
     }
     linkWorkers.shutdown();
     for (Future f : futures) {
-      Futures.get(f, IOException.class);
+      Futures.getChecked(f, IOException.class);
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
index c887e9dcb5..3e4a319d56 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/AclTransformation.java
@@ -28,7 +28,7 @@
 import java.util.Iterator;
 import java.util.List;
 
-import com.google.common.base.Objects;
+import com.google.common.base.MoreObjects;
 import com.google.common.collect.ComparisonChain;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -366,7 +366,7 @@ private static void calculateMasks(List<AclEntry> aclBuilder,
     for (AclEntry entry: aclBuilder) {
       scopeFound.add(entry.getScope());
       if (entry.getType() == GROUP || entry.getName() != null) {
-        FsAction scopeUnionPerms = Objects.firstNonNull(
+        FsAction scopeUnionPerms = MoreObjects.firstNonNull(
             unionPerms.get(entry.getScope()), FsAction.NONE);
         unionPerms.put(entry.getScope(),
             scopeUnionPerms.or(entry.getPermission()));
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
index fde54a8a43..db77d31be2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
@@ -63,7 +63,7 @@ public class JournalSet implements JournalManager {
     public int compare(EditLogInputStream elis1, EditLogInputStream elis2) {
      // we want local logs to be ordered earlier in the collection, and true
      // is considered larger than false, so we want to invert the booleans here
-      return ComparisonChain.start().compare(!elis1.isLocalLog(),
+      return ComparisonChain.start().compareFalseFirst(!elis1.isLocalLog(),
          !elis2.isLocalLog()).result();
    }
  };
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
index ecdbaf5296..c752f239e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
@@ -402,7 +402,7 @@ public void afterCall(InvocationOnMock invocation, boolean succeeded) {
 
     @Override
     protected ExecutorService createSingleThreadExecutor() {
-      return MoreExecutors.sameThreadExecutor();
+      return MoreExecutors.newDirectExecutorService();
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
index 7d770e067a..9aada1d155 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
@@ -946,7 +946,7 @@ public AsyncLogger createLogger(Configuration conf, NamespaceInfo nsInfo,
       protected ExecutorService createSingleThreadExecutor() {
         // Don't parallelize calls to the quorum in the tests.
         // This makes the tests more deterministic.
-        return MoreExecutors.sameThreadExecutor();
+        return MoreExecutors.newDirectExecutorService();
       }
     };
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 8de7703359..68e8b5c296 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -514,7 +514,7 @@
       <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
-        <version>11.0.2</version>
+        <version>21.0</version>
       </dependency>
       <dependency>
         <groupId>com.google.code.gson</groupId>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 62b54e7ed8..66930e74bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -283,7 +283,7 @@ private void checkTags(Set<String> tags) {
             "maximum allowed length of a tag is " +
             YarnConfiguration.APPLICATION_MAX_TAG_LENGTH);
       }
-      if (!CharMatcher.ASCII.matchesAllOf(tag)) {
+      if (!CharMatcher.ascii().matchesAllOf(tag)) {
         throw new IllegalArgumentException("A tag can only have ASCII " +
             "characters! Invalid tag - " + tag);
       }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
index de6a52bda5..300bf3ee45 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
@@ -275,7 +275,7 @@ static List<String> parseRoute(String pathSpec) {
 
   static String getPrefix(String pathSpec) {
     int start = 0;
-    while (CharMatcher.WHITESPACE.matches(pathSpec.charAt(start))) {
+    while (CharMatcher.whitespace().matches(pathSpec.charAt(start))) {
       ++start;
     }
     if (pathSpec.charAt(start) != '/') {
@@ -291,7 +291,7 @@ static String getPrefix(String pathSpec) {
     char c;
     do {
       c = pathSpec.charAt(--ci);
-    } while (c == '/' || CharMatcher.WHITESPACE.matches(c));
+    } while (c == '/' || CharMatcher.whitespace().matches(c));
     return pathSpec.substring(start, ci + 1);
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
index e211867f85..1f446e6d48 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
@@ -590,7 +590,7 @@ public void updateApplicationTimeout(RMApp app,
     this.rmContext.getStateStore()
         .updateApplicationStateSynchronously(appState, false, future);
 
-    Futures.get(future, YarnException.class);
+    Futures.getChecked(future, YarnException.class);
 
     // update in-memory
     ((RMAppImpl) app).updateApplicationTimeout(newExpireTime);
@@ -627,7 +627,7 @@ public void updateApplicationPriority(UserGroupInformation callerUGI,
       return;
     }
 
-    Futures.get(future, YarnException.class);
+    Futures.getChecked(future, YarnException.class);
 
     // update in-memory
     ((RMAppImpl) app).setApplicationPriority(appPriority);
@@ -710,7 +710,7 @@ private void updateAppDataToStateStore(String queue, RMApp app,
         false, future);
 
     try {
-      Futures.get(future, YarnException.class);
+      Futures.getChecked(future, YarnException.class);
     } catch (YarnException ex) {
       if (!toSuppressException) {
         throw ex;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
index 163a265918..d29d34e094 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
@@ -465,7 +465,7 @@ private void loadQueue(String parentName, Element element,
       Set<String> reservableQueues,
       Set<String> nonPreemptableQueues)
       throws AllocationConfigurationException {
-    String queueName = CharMatcher.WHITESPACE.trimFrom(
+    String queueName = CharMatcher.whitespace().trimFrom(
         element.getAttribute("name"));
 
     if (queueName.contains(".")) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
index 3c601fa1a1..5b006dfcf9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueueManager.java
@@ -535,6 +535,6 @@ boolean isQueueNameValid(String node) {
     // use the same white space trim as in QueueMetrics() otherwise things fail
     // guava uses a different definition for whitespace than java.
     return !node.isEmpty() &&
-        node.equals(CharMatcher.WHITESPACE.trimFrom(node));
+        node.equals(CharMatcher.whitespace().trimFrom(node));
   }
 }