diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
index fc6f957b96..5125be078d 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
@@ -236,7 +236,7 @@ public static final String getServicePrincipal(String service,
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
     Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
+    Set<String> principals = new HashSet<>();
     List<PrincipalName> entries = keytab.getPrincipals();
     for (PrincipalName entry : entries) {
       principals.add(entry.getName().replace("\\", "/"));
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
index 8fc08e2171..293871bcd0 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
@@ -108,9 +108,9 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
   public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
     LoginContext loginContext = null;
     try {
-      Set<Principal> principals = new HashSet<Principal>();
+      Set<Principal> principals = new HashSet<>();
       principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
-      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
+      Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
       loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
       loginContext.login();
       subject = loginContext.getSubject();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 5f720841d7..d8ceb58aba 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -774,7 +774,7 @@ private void updatePropertiesWithDeprecatedKeys(
   private void handleDeprecation() {
     LOG.debug("Handling deprecation for all properties in config...");
     DeprecationContext deprecations = deprecationContext.get();
-    Set<Object> keys = new HashSet<Object>();
+    Set<Object> keys = new HashSet<>();
     keys.addAll(getProps().keySet());
     for (Object item: keys) {
       LOG.debug("Handling deprecation for " + (String)item);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 298570bb55..22ac2ecbd7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -2372,8 +2372,7 @@ public FileStatus next(final AbstractFileSystem fs, final Path p)
   Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
       throws IOException {
     final Path absF = fixRelativePart(f);
-    final HashSet<AbstractFileSystem> result
-        = new HashSet<AbstractFileSystem>();
+    final HashSet<AbstractFileSystem> result = new HashSet<>();
     new FSLinkResolver<Void>() {
       @Override
       public Void next(final AbstractFileSystem fs, final Path p)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
index de86bab6d3..eace6417dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
@@ -76,7 +76,7 @@ synchronized void returnToPool(ChannelSftp channel) {
     ConnectionInfo info = con2infoMap.get(channel);
     HashSet<ChannelSftp> cons = idleConnections.get(info);
     if (cons == null) {
-      cons = new HashSet<ChannelSftp>();
+      cons = new HashSet<>();
       idleConnections.put(info, cons);
     }
     cons.add(channel);
@@ -94,7 +94,7 @@ synchronized void shutdown() {
     Set<ChannelSftp> cons = con2infoMap.keySet();
     if (cons != null && cons.size() > 0) {
       // make a copy since we need to modify the underlying Map
-      Set<ChannelSftp> copy = new HashSet<ChannelSftp>(cons);
+      Set<ChannelSftp> copy = new HashSet<>(cons);
       // Initiate disconnect from all outstanding connections
       for (ChannelSftp con : copy) {
         try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java
index 4dd20d1084..1228f76d84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java
@@ -165,7 +165,7 @@ public String getOptValue(String option) {
    * @return Set{@literal <}String{@literal >} of the enabled options
    */
   public Set<String> getOpts() {
-    Set<String> optSet = new HashSet<String>();
+    Set<String> optSet = new HashSet<>();
     for (Map.Entry<String, Boolean> entry : options.entrySet()) {
       if (entry.getValue()) {
         optSet.add(entry.getKey());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
index 199038a751..07baea89dd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
@@ -96,7 +96,7 @@ private static void addExpression(Class<?> clazz) {
   private Expression rootExpression;
 
   /** Set of path items returning a {@link Result#STOP} result. */
-  private HashSet<Path> stopPaths = new HashSet<Path>();
+  private HashSet<Path> stopPaths = new HashSet<>();
 
   /** Register the expressions with the expression factory. */
   private static void registerExpressions(ExpressionFactory factory) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index da3955b125..e31a701a6e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -1037,7 +1037,7 @@ public FileSystem[] getChildFileSystems() {
     List<InodeTree.MountPoint<FileSystem>> mountPoints =
         fsState.getMountPoints();
     Map<String, FileSystem> fsMap = initializeMountedFileSystems(mountPoints);
-    Set<FileSystem> children = new HashSet<FileSystem>();
+    Set<FileSystem> children = new HashSet<>();
     for (InodeTree.MountPoint<FileSystem> mountPoint : mountPoints) {
       FileSystem targetFs = fsMap.get(mountPoint.src);
       children.addAll(Arrays.asList(targetFs.getChildFileSystems()));
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 69e8c99a1f..1f095c6c67 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -109,7 +109,7 @@ private static <T> boolean payback(Map<Class<T>, Set<T>> pool, T codec) {
     synchronized (pool) {
       codecSet = pool.get(codecClass);
       if (codecSet == null) {
-        codecSet = new HashSet<T>();
+        codecSet = new HashSet<>();
         pool.put(codecClass, codecSet);
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
index cfbc60d104..544958e682 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
@@ -64,7 +64,7 @@ public synchronized boolean accept(Class<?> c) {
 
   private void getPackages() {
     String[] pkgList = getConf().getStrings(AVRO_REFLECT_PACKAGES);
-    packages = new HashSet<String>();
+    packages = new HashSet<>();
     if (pkgList != null) {
       for (String pkg : pkgList) {
         packages.add(pkg.trim());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
index 49029f97b3..f5f212b292 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
@@ -69,7 +69,7 @@ private void fetchServerMethods(Method method) throws IOException {
     }
     int[] serverMethodsCodes = serverInfo.getMethods();
     if (serverMethodsCodes != null) {
-      serverMethods = new HashSet<Integer>(serverMethodsCodes.length);
+      serverMethods = new HashSet<>(serverMethodsCodes.length);
       for (int m : serverMethodsCodes) {
         this.serverMethods.add(Integer.valueOf(m));
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
index 19696bd839..90b5da01c0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
@@ -19,11 +19,10 @@ package org.apache.hadoop.metrics2.lib;
 
 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Set;
 
 import static org.apache.hadoop.util.Preconditions.*;
 
-import org.apache.hadoop.util.Sets;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@@ -44,7 +43,7 @@ public class MutableRates extends MutableMetric {
   static final Logger LOG = LoggerFactory.getLogger(MutableRates.class);
 
   private final MetricsRegistry registry;
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();
 
   MutableRates(MetricsRegistry registry) {
     this.registry = checkNotNull(registry, "metrics registry");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
index dc37f96f4f..4c5f0a844a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
@@ -18,9 +18,9 @@ package org.apache.hadoop.metrics2.lib;
 
-import org.apache.hadoop.util.Sets;
 import java.lang.ref.WeakReference;
 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
@@ -52,7 +52,7 @@ public class MutableRatesWithAggregation extends MutableMetric {
       LoggerFactory.getLogger(MutableRatesWithAggregation.class);
   private final Map<String, MutableRate> globalMetrics =
       new ConcurrentHashMap<>();
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();
   private final ConcurrentLinkedDeque<WeakReference<ConcurrentMap<String, MutableRate>>>
       weakReferenceQueue = new ConcurrentLinkedDeque<>();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
index f050219398..5a13b00098 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
@@ -115,7 +115,7 @@ public String dumpTopology() {
     builder.append("Mapping: ").append(toString()).append("\n");
     if (rack != null) {
       builder.append("Map:\n");
-      Set<String> switches = new HashSet<String>();
+      Set<String> switches = new HashSet<>();
       for (Map.Entry<String, String> entry : rack.entrySet()) {
         builder.append("  ")
             .append(entry.getKey())
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 6644b3911b..ebb354e7db 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -1086,7 +1086,7 @@ private void interAddNodeWithEmptyRack(Node node) {
     String rackname = node.getNetworkLocation();
     Set<String> nodes = rackMap.get(rackname);
     if (nodes == null) {
-      nodes = new HashSet<String>();
+      nodes = new HashSet<>();
     }
     if (!decommissionNodes.contains(node.getName())) {
       nodes.add(node.getName());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
index 6f799c1542..deca6f1152 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
@@ -109,7 +109,7 @@ public void cacheGroupsAdd(List<String> groups) throws IOException {
 
   @Override
   public synchronized Set<String> getGroupsSet(String user) throws IOException {
-    Set<String> groupSet = new HashSet<String>();
+    Set<String> groupSet = new HashSet<>();
 
     Set<String> groups = null;
     for (GroupMappingServiceProvider provider : providersList) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
index aa06c59a64..5e466033fb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
@@ -65,7 +65,7 @@ public static List<String> getNetgroupNames() {
   }
 
   private static Set<String> getGroups() {
-    Set<String> allGroups = new HashSet<String> ();
+    Set<String> allGroups = new HashSet<>();
     for (Set<String> userGroups : userToNetgroupsMap.values()) {
       allGroups.addAll(userGroups);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
index 39dc29a79e..6fabbfb47b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
@@ -105,8 +105,8 @@ public AccessControlList(String users, String groups) {
    * @param userGroupStrings build ACL from array of Strings
    */
   private void buildACL(String[] userGroupStrings) {
-    users = new HashSet<String>();
-    groups = new HashSet<String>();
+    users = new HashSet<>();
+    groups = new HashSet<>();
     for (String aclPart : userGroupStrings) {
       if (aclPart != null && isWildCardACLValue(aclPart)) {
         allAllowed = true;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
index 410e25f583..6f5283074d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
@@ -33,7 +33,7 @@ public static void refresh() {
   }
 
   public static void refresh(Configuration conf){
-    Collection<String> tempServers = new HashSet<String>();
+    Collection<String> tempServers = new HashSet<>();
     // trusted proxy servers such as http proxies
     for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
       InetSocketAddress addr = new InetSocketAddress(host, 0);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
index b81ed8e901..7363ca0ba6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
@@ -94,7 +94,7 @@ public void init(FilterConfig filterConfig) throws ServletException {
 
   void parseBrowserUserAgents(String userAgents) {
     String[] agentsArray = userAgents.split(",");
-    browserUserAgents = new HashSet<Pattern>();
+    browserUserAgents = new HashSet<>();
     for (String patternString : agentsArray) {
       browserUserAgents.add(Pattern.compile(patternString));
     }
@@ -102,7 +102,7 @@ void parseBrowserUserAgents(String userAgents) {
 
   void parseMethodsToIgnore(String mti) {
     String[] methods = mti.split(",");
-    methodsToIgnore = new HashSet<String>();
+    methodsToIgnore = new HashSet<>();
     for (int i = 0; i < methods.length; i++) {
       methodsToIgnore.add(methods[i]);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
index cd24a97d7c..d0c0fac6e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
@@ -716,7 +716,7 @@ public String getTrackingId() {
   /** Remove expired delegation tokens from cache */
   private void removeExpiredToken() throws IOException {
     long now = Time.now();
-    Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
+    Set<TokenIdent> expiredTokens = new HashSet<>();
     synchronized (this) {
       Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
           currentTokens.entrySet().iterator();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index b55214451e..f4ede6f35e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -89,7 +89,7 @@ public abstract class DelegationTokenAuthenticationHandler
 
   public static final String TOKEN_KIND = PREFIX + "token-kind";
 
-  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<String>();
+  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<>();
 
   public static final String DELEGATION_TOKEN_UGI_ATTRIBUTE =
       "hadoop.security.delegation-token.ugi";
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
index 47aa9cc71a..31dfe59420 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
@@ -58,7 +58,7 @@ public FileBasedIPList(String fileName) {
       lines = null;
     }
     if (lines != null) {
-      addressList = new MachineList(new HashSet<String>(Arrays.asList(lines)));
+      addressList = new MachineList(new HashSet<>(Arrays.asList(lines)));
     } else {
       addressList = null;
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
index 5141740a3d..d94668356e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
@@ -135,7 +135,7 @@ public static void readFileToMapWithFileInputStream(String type,
     if (xmlInput) {
       readXmlFileToMapWithFileInputStream(type, filename, inputStream, map);
     } else {
-      HashSet<String> nodes = new HashSet<String>();
+      HashSet<String> nodes = new HashSet<>();
       readFileToSetWithFileInputStream(type, filename, inputStream, nodes);
       for (String node : nodes) {
         map.put(node, null);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
index fbdd33331b..e85f850514 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
@@ -249,7 +249,7 @@ TimeUnit getTimeUnit() {
   }
 
   private final Set<HookEntry> hooks =
-      Collections.synchronizedSet(new HashSet<HookEntry>());
+      Collections.synchronizedSet(new HashSet<>());
 
   private AtomicBoolean shutdownInProgress = new AtomicBoolean(false);
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
index 9fcf4a5eb5..9d9a43da2d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
@@ -80,9 +80,9 @@ public void initializeMemberVariables() {
     };
 
     // Initialize used variables
-    xmlPropsToSkipCompare = new HashSet<String>();
-    xmlPrefixToSkipCompare = new HashSet<String>();
-    configurationPropsToSkipCompare = new HashSet<String>();
+    xmlPropsToSkipCompare = new HashSet<>();
+    xmlPrefixToSkipCompare = new HashSet<>();
+    configurationPropsToSkipCompare = new HashSet<>();
 
     // Set error modes
     errorIfMissingConfigProps = true;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
index 5da973c6a7..4805fca1d4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.crypto.key;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeoutException;
@@ -32,7 +34,6 @@
 import org.junit.Assert;
 import org.junit.Test;
 
-import org.apache.hadoop.util.Sets;
 
 public class TestValueQueue {
   Logger LOG = LoggerFactory.getLogger(TestValueQueue.class);
@@ -103,10 +104,10 @@ public void testWarmUp() throws Exception {
     Assert.assertEquals(5, fillInfos[0].num);
     Assert.assertEquals(5, fillInfos[1].num);
     Assert.assertEquals(5, fillInfos[2].num);
-    Assert.assertEquals(Sets.newHashSet("k1", "k2", "k3"),
-        Sets.newHashSet(fillInfos[0].key,
+    Assert.assertEquals(new HashSet<>(Arrays.asList("k1", "k2", "k3")),
+        new HashSet<>(Arrays.asList(fillInfos[0].key,
             fillInfos[1].key,
-            fillInfos[2].key));
+            fillInfos[2].key)));
     vq.shutdown();
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
index 886297b745..3bc96c3e2f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
@@ -39,6 +39,8 @@
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
@@ -65,7 +67,6 @@
 import org.junit.rules.Timeout;
 import org.mockito.Mockito;
 
-import org.apache.hadoop.util.Sets;
 
 public class TestLoadBalancingKMSClientProvider {
 
@@ -86,8 +87,8 @@ public void testCreation() throws Exception {
     KMSClientProvider[] providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(1, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl()));
+    assertEquals(new HashSet<>(Collections.singleton("http://host1:9600/kms/foo/v1/")),
+        new HashSet<>(Collections.singleton(providers[0].getKMSUrl())));
 
     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -95,12 +96,12 @@ public void testCreation() throws Exception {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
         "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
             providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+            providers[2].getKMSUrl())));
 
     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -108,12 +109,12 @@ public void testCreation() throws Exception {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
         "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
            providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+            providers[2].getKMSUrl())));
   }
 
   @Test
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
index 4b855c4940..084c6a0aef 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
@@ -43,9 +43,9 @@ public class TestCommandFormat {
 
   @Before
   public void setUp() {
-    args = new ArrayList<String>();
-    expectedOpts = new HashSet<String>();
-    expectedArgs = new ArrayList<String>();
+    args = new ArrayList<>();
+    expectedOpts = new HashSet<>();
+    expectedArgs = new ArrayList<>();
   }
 
   @Test
@@ -205,6 +205,6 @@ private static List<String> listOf(String ... objects) {
   }
 
   private static Set<String> setOf(String ... objects) {
-    return new HashSet<String>(listOf(objects));
+    return new HashSet<>(listOf(objects));
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
index 6415df6310..471d2458f4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
@@ -246,7 +246,7 @@ public void testListLocatedStatus() throws Exception {
     // test.har has the following contents:
     //   dir1/1.txt
     //   dir1/2.txt
-    Set<String> expectedFileNames = new HashSet<String>();
+    Set<String> expectedFileNames = new HashSet<>();
     expectedFileNames.add("1.txt");
     expectedFileNames.add("2.txt");
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
index 44308ea6fc..dce3b956d4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
@@ -152,7 +152,7 @@ public void testDirectory() throws IOException {
     writeFile(fs, FILE1, FILE_LEN);
     writeFile(fs, FILE3, FILE_LEN);
 
-    Set<Path> filesToFind = new HashSet<Path>();
+    Set<Path> filesToFind = new HashSet<>();
     filesToFind.add(fs.makeQualified(FILE1));
     filesToFind.add(fs.makeQualified(FILE2));
     filesToFind.add(fs.makeQualified(FILE3));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
index 72287782ba..5b8c10b3fa 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
@@ -747,7 +747,7 @@ public void testTrashEmptier() throws Exception {
     Path myPath = new Path(TEST_DIR, "test/mkdirs");
     mkdir(fs, myPath);
     int fileIndex = 0;
-    Set<String> checkpoints = new HashSet<String>();
+    Set<String> checkpoints = new HashSet<>();
     while (true) {
       // Create a file with a new name
       Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
index fca72d9c65..51f207f97a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
@@ -275,7 +275,7 @@ public void testListDirectory() throws IOException {
     File dir = new File("testListDirectory");
     Files.createDirectory(dir.toPath());
     try {
-      Set<String> entries = new HashSet<String>();
+      Set<String> entries = new HashSet<>();
       entries.add("entry1");
       entries.add("entry2");
       entries.add("entry3");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java
index 1fb25cb908..ec99598e79 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java
@@ -69,7 +69,7 @@ public void testCompressorNotReturnSameInstance() {
     Compressor comp = CodecPool.getCompressor(codec);
     CodecPool.returnCompressor(comp);
     CodecPool.returnCompressor(comp);
-    Set<Compressor> compressors = new HashSet<Compressor>();
+    Set<Compressor> compressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       compressors.add(CodecPool.getCompressor(codec));
     }
@@ -180,7 +180,7 @@ public void testDecompressorNotReturnSameInstance() {
     Decompressor decomp = CodecPool.getDecompressor(codec);
     CodecPool.returnDecompressor(decomp);
     CodecPool.returnDecompressor(decomp);
-    Set<Decompressor> decompressors = new HashSet<Decompressor>();
+    Set<Decompressor> decompressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       decompressors.add(CodecPool.getDecompressor(codec));
     }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
index f1bf4bb91e..61d5938494 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
@@ -39,6 +39,7 @@
 import java.util.Random;
 import java.util.Set;
 import java.util.Enumeration;
+import java.util.TreeSet;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -72,7 +73,6 @@
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.util.Sets;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.util.functional.CommonCallableSupplier.submit;
@@ -344,13 +344,13 @@ public static void assertExists(File f) {
   public static void assertGlobEquals(File dir, String pattern,
       String ... expectedMatches) throws IOException {
 
-    Set<String> found = Sets.newTreeSet();
+    Set<String> found = new TreeSet<>();
     for (File f : FileUtil.listFiles(dir)) {
       if (f.getName().matches(pattern)) {
         found.add(f.getName());
       }
     }
-    Set<String> expectedSet = Sets.newTreeSet(
+    Set<String> expectedSet = new TreeSet<>(
         Arrays.asList(expectedMatches));
     Assert.assertEquals("Bad files matching " + pattern + " in " + dir,
         Joiner.on(",").join(expectedSet),
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
index 217c2f84eb..e270ee6800 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
@@ -70,8 +70,8 @@ public abstract class MultithreadedTestUtil {
   public static class TestContext {
     private Throwable err = null;
     private boolean stopped = false;
-    private Set<TestingThread> testThreads = new HashSet<TestingThread>();
-    private Set<TestingThread> finishedThreads = new HashSet<TestingThread>();
+    private Set<TestingThread> testThreads = new HashSet<>();
+    private Set<TestingThread> finishedThreads = new HashSet<>();
 
     /**
      * Check if the context can run threads.
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
index 14c2ae907b..d71172e1b9 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
@@ -36,9 +36,9 @@
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalListener;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalNotification;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -56,10 +56,10 @@
  */
 public class KMSAudit {
   @VisibleForTesting
-  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = Sets.newHashSet(
+  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = new HashSet<>(Arrays.asList(
     KMS.KMSOp.GET_KEY_VERSION, KMS.KMSOp.GET_CURRENT_KEY,
     KMS.KMSOp.DECRYPT_EEK, KMS.KMSOp.GENERATE_EEK, KMS.KMSOp.REENCRYPT_EEK
-  );
+  ));
 
   private Cache<String, AuditEvent> cache;
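
Reviewer note: the patch applies two mechanical substitutions throughout — explicit constructor type arguments become the Java 7+ diamond operator, and uses of the removed org.apache.hadoop.util.Sets helpers become plain JDK collection constructions. A minimal standalone sketch of the substitution patterns follows; the class name and sample values are illustrative only and not part of the patch:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.TreeSet;

    public class SetsMigrationSketch {
      public static void main(String[] args) {
        // new HashSet<String>() -> new HashSet<>(): the element type is
        // inferred from the declared variable type.
        Set<String> empty = new HashSet<>();

        // Sets.newHashSet(e1, e2, e3) -> new HashSet<>(Arrays.asList(...)),
        // the pattern used in TestValueQueue and KMSAudit.
        Set<String> fromElements = new HashSet<>(Arrays.asList("k1", "k2", "k3"));

        // Single-element variant, as in TestLoadBalancingKMSClientProvider.
        Set<String> single =
            new HashSet<>(Collections.singleton("http://host1:9600/kms/foo/v1/"));

        // Sets.newTreeSet(coll) -> new TreeSet<>(coll), as in GenericTestUtils.
        Set<String> sorted = new TreeSet<>(Arrays.asList("b", "a"));

        System.out.println(empty + " " + fromElements + " " + single + " " + sorted);
      }
    }

Behavior is unchanged in every case; the rewrite only drops the extra hadoop-util dependency and lets the compiler infer element types.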