HADOOP-18266. Using HashSet/TreeSet Constructor for hadoop-common (#4365)

* HADOOP-18266. Using HashSet/TreeSet Constructor for hadoop-common

Co-authored-by: Deb <dbsamrat@3c22fba1b03f.ant.amazon.com>
Authored by Samrat on 2022-06-20 12:11:04 +05:30; committed by GitHub
parent efc2761d32
commit 477b67a335
37 changed files with 69 additions and 69 deletions
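Every hunk below applies one of three mechanical substitutions: the diamond operator in place of an explicit type argument, a JDK HashSet constructor over a List view in place of the Guava-style Sets.newHashSet(...) helper, and a TreeSet constructor in place of Sets.newTreeSet(). A minimal standalone sketch of the three patterns (class and variable names here are illustrative, not part of the patch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class DiamondDemo {
  public static void main(String[] args) {
    // Pattern 1: diamond operator; the compiler infers the type argument.
    Set<String> empty = new HashSet<>();   // was: new HashSet<String>()

    // Pattern 2: Sets.newHashSet("a", "b") becomes the copy constructor
    // over a List view of the elements.
    Set<String> pair = new HashSet<>(Arrays.asList("a", "b"));

    // Pattern 3: Sets.newTreeSet() becomes the plain TreeSet constructor.
    Set<String> sorted = new TreeSet<>(Arrays.asList("b", "a"));

    System.out.println(empty.isEmpty());  // true
    System.out.println(pair.contains("a"));  // true
    System.out.println(sorted);  // [a, b], in sorted order
  }
}

The resulting sets behave identically; the change simply removes the dependency on the org.apache.hadoop.util.Sets shim and drops type arguments that Java 7+ can infer.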

@@ -236,7 +236,7 @@ public static final String getServicePrincipal(String service,
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
     Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
+    Set<String> principals = new HashSet<>();
     List<PrincipalName> entries = keytab.getPrincipals();
     for (PrincipalName entry : entries) {
       principals.add(entry.getName().replace("\\", "/"));

@@ -108,9 +108,9 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
   public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
     LoginContext loginContext = null;
     try {
-      Set<Principal> principals = new HashSet<Principal>();
+      Set<Principal> principals = new HashSet<>();
       principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
-      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
+      Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
       loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
       loginContext.login();
       subject = loginContext.getSubject();

@@ -774,7 +774,7 @@ private void updatePropertiesWithDeprecatedKeys(
   private void handleDeprecation() {
     LOG.debug("Handling deprecation for all properties in config...");
     DeprecationContext deprecations = deprecationContext.get();
-    Set<Object> keys = new HashSet<Object>();
+    Set<Object> keys = new HashSet<>();
     keys.addAll(getProps().keySet());
     for (Object item: keys) {
       LOG.debug("Handling deprecation for " + (String)item);

@@ -2372,8 +2372,7 @@ public FileStatus next(final AbstractFileSystem fs, final Path p)
   Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
       throws IOException {
     final Path absF = fixRelativePart(f);
-    final HashSet<AbstractFileSystem> result
-        = new HashSet<AbstractFileSystem>();
+    final HashSet<AbstractFileSystem> result = new HashSet<>();
     new FSLinkResolver<Void>() {
       @Override
       public Void next(final AbstractFileSystem fs, final Path p)

@@ -76,7 +76,7 @@ synchronized void returnToPool(ChannelSftp channel) {
     ConnectionInfo info = con2infoMap.get(channel);
     HashSet<ChannelSftp> cons = idleConnections.get(info);
     if (cons == null) {
-      cons = new HashSet<ChannelSftp>();
+      cons = new HashSet<>();
       idleConnections.put(info, cons);
     }
     cons.add(channel);
@@ -94,7 +94,7 @@ synchronized void shutdown() {
     Set<ChannelSftp> cons = con2infoMap.keySet();
     if (cons != null && cons.size() > 0) {
       // make a copy since we need to modify the underlying Map
-      Set<ChannelSftp> copy = new HashSet<ChannelSftp>(cons);
+      Set<ChannelSftp> copy = new HashSet<>(cons);
       // Initiate disconnect from all outstanding connections
       for (ChannelSftp con : copy) {
         try {

@@ -165,7 +165,7 @@ public String getOptValue(String option) {
    * @return Set{@literal <}String{@literal >} of the enabled options
    */
   public Set<String> getOpts() {
-    Set<String> optSet = new HashSet<String>();
+    Set<String> optSet = new HashSet<>();
     for (Map.Entry<String, Boolean> entry : options.entrySet()) {
       if (entry.getValue()) {
         optSet.add(entry.getKey());

@@ -96,7 +96,7 @@ private static void addExpression(Class<?> clazz) {
   private Expression rootExpression;

   /** Set of path items returning a {@link Result#STOP} result. */
-  private HashSet<Path> stopPaths = new HashSet<Path>();
+  private HashSet<Path> stopPaths = new HashSet<>();

   /** Register the expressions with the expression factory. */
   private static void registerExpressions(ExpressionFactory factory) {

@@ -1037,7 +1037,7 @@ public FileSystem[] getChildFileSystems() {
     List<InodeTree.MountPoint<FileSystem>> mountPoints =
         fsState.getMountPoints();
     Map<String, FileSystem> fsMap = initializeMountedFileSystems(mountPoints);
-    Set<FileSystem> children = new HashSet<FileSystem>();
+    Set<FileSystem> children = new HashSet<>();
     for (InodeTree.MountPoint<FileSystem> mountPoint : mountPoints) {
       FileSystem targetFs = fsMap.get(mountPoint.src);
       children.addAll(Arrays.asList(targetFs.getChildFileSystems()));

@@ -109,7 +109,7 @@ private static <T> boolean payback(Map<Class<T>, Set<T>> pool, T codec) {
     synchronized (pool) {
       codecSet = pool.get(codecClass);
       if (codecSet == null) {
-        codecSet = new HashSet<T>();
+        codecSet = new HashSet<>();
         pool.put(codecClass, codecSet);
       }
     }

@@ -64,7 +64,7 @@ public synchronized boolean accept(Class<?> c) {

   private void getPackages() {
     String[] pkgList = getConf().getStrings(AVRO_REFLECT_PACKAGES);
-    packages = new HashSet<String>();
+    packages = new HashSet<>();
     if (pkgList != null) {
       for (String pkg : pkgList) {
         packages.add(pkg.trim());

@@ -69,7 +69,7 @@ private void fetchServerMethods(Method method) throws IOException {
     }
     int[] serverMethodsCodes = serverInfo.getMethods();
     if (serverMethodsCodes != null) {
-      serverMethods = new HashSet<Integer>(serverMethodsCodes.length);
+      serverMethods = new HashSet<>(serverMethodsCodes.length);
       for (int m : serverMethodsCodes) {
         this.serverMethods.add(Integer.valueOf(m));
       }

@@ -19,11 +19,10 @@
 package org.apache.hadoop.metrics2.lib;

 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Set;

 import static org.apache.hadoop.util.Preconditions.*;

-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@@ -44,7 +43,7 @@
 public class MutableRates extends MutableMetric {
   static final Logger LOG = LoggerFactory.getLogger(MutableRates.class);
   private final MetricsRegistry registry;
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();

   MutableRates(MetricsRegistry registry) {
     this.registry = checkNotNull(registry, "metrics registry");

@@ -18,9 +18,9 @@
 package org.apache.hadoop.metrics2.lib;

-import org.apache.hadoop.util.Sets;

 import java.lang.ref.WeakReference;
 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
@@ -52,7 +52,7 @@ public class MutableRatesWithAggregation extends MutableMetric {
       LoggerFactory.getLogger(MutableRatesWithAggregation.class);

   private final Map<String, MutableRate> globalMetrics =
       new ConcurrentHashMap<>();
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();
   private final ConcurrentLinkedDeque<WeakReference<ConcurrentMap<String, ThreadSafeSampleStat>>>
       weakReferenceQueue = new ConcurrentLinkedDeque<>();

@@ -115,7 +115,7 @@ public String dumpTopology() {
     builder.append("Mapping: ").append(toString()).append("\n");
     if (rack != null) {
       builder.append("Map:\n");
-      Set<String> switches = new HashSet<String>();
+      Set<String> switches = new HashSet<>();
       for (Map.Entry<String, String> entry : rack.entrySet()) {
         builder.append(" ")
             .append(entry.getKey())

@@ -1086,7 +1086,7 @@ private void interAddNodeWithEmptyRack(Node node) {
     String rackname = node.getNetworkLocation();
     Set<String> nodes = rackMap.get(rackname);
     if (nodes == null) {
-      nodes = new HashSet<String>();
+      nodes = new HashSet<>();
     }
     if (!decommissionNodes.contains(node.getName())) {
       nodes.add(node.getName());

@@ -109,7 +109,7 @@ public void cacheGroupsAdd(List<String> groups) throws IOException {

   @Override
   public synchronized Set<String> getGroupsSet(String user) throws IOException {
-    Set<String> groupSet = new HashSet<String>();
+    Set<String> groupSet = new HashSet<>();
     Set<String> groups = null;

     for (GroupMappingServiceProvider provider : providersList) {

@@ -65,7 +65,7 @@ public static List<String> getNetgroupNames() {
   }

   private static Set<String> getGroups() {
-    Set<String> allGroups = new HashSet<String> ();
+    Set<String> allGroups = new HashSet<>();
     for (Set<String> userGroups : userToNetgroupsMap.values()) {
       allGroups.addAll(userGroups);
     }

@@ -105,8 +105,8 @@ public AccessControlList(String users, String groups) {
    * @param userGroupStrings build ACL from array of Strings
    */
   private void buildACL(String[] userGroupStrings) {
-    users = new HashSet<String>();
-    groups = new HashSet<String>();
+    users = new HashSet<>();
+    groups = new HashSet<>();
     for (String aclPart : userGroupStrings) {
       if (aclPart != null && isWildCardACLValue(aclPart)) {
         allAllowed = true;

@@ -33,7 +33,7 @@ public static void refresh() {
   }

   public static void refresh(Configuration conf){
-    Collection<String> tempServers = new HashSet<String>();
+    Collection<String> tempServers = new HashSet<>();
     // trusted proxy servers such as http proxies
     for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
       InetSocketAddress addr = new InetSocketAddress(host, 0);

@@ -94,7 +94,7 @@ public void init(FilterConfig filterConfig) throws ServletException {

   void parseBrowserUserAgents(String userAgents) {
     String[] agentsArray = userAgents.split(",");
-    browserUserAgents = new HashSet<Pattern>();
+    browserUserAgents = new HashSet<>();
     for (String patternString : agentsArray) {
       browserUserAgents.add(Pattern.compile(patternString));
     }
@@ -102,7 +102,7 @@ void parseBrowserUserAgents(String userAgents) {

   void parseMethodsToIgnore(String mti) {
     String[] methods = mti.split(",");
-    methodsToIgnore = new HashSet<String>();
+    methodsToIgnore = new HashSet<>();
     for (int i = 0; i < methods.length; i++) {
       methodsToIgnore.add(methods[i]);
     }

@@ -716,7 +716,7 @@ public String getTrackingId() {
   /** Remove expired delegation tokens from cache */
   private void removeExpiredToken() throws IOException {
     long now = Time.now();
-    Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
+    Set<TokenIdent> expiredTokens = new HashSet<>();
     synchronized (this) {
       Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
           currentTokens.entrySet().iterator();

@@ -89,7 +89,7 @@ public abstract class DelegationTokenAuthenticationHandler
   public static final String TOKEN_KIND = PREFIX + "token-kind";

-  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<String>();
+  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<>();

   public static final String DELEGATION_TOKEN_UGI_ATTRIBUTE =
       "hadoop.security.delegation-token.ugi";

@@ -58,7 +58,7 @@ public FileBasedIPList(String fileName) {
       lines = null;
     }
     if (lines != null) {
-      addressList = new MachineList(new HashSet<String>(Arrays.asList(lines)));
+      addressList = new MachineList(new HashSet<>(Arrays.asList(lines)));
     } else {
       addressList = null;
     }

@@ -135,7 +135,7 @@ public static void readFileToMapWithFileInputStream(String type,
     if (xmlInput) {
       readXmlFileToMapWithFileInputStream(type, filename, inputStream, map);
     } else {
-      HashSet<String> nodes = new HashSet<String>();
+      HashSet<String> nodes = new HashSet<>();
       readFileToSetWithFileInputStream(type, filename, inputStream, nodes);
       for (String node : nodes) {
         map.put(node, null);

@@ -249,7 +249,7 @@ TimeUnit getTimeUnit() {
   }

   private final Set<HookEntry> hooks =
-      Collections.synchronizedSet(new HashSet<HookEntry>());
+      Collections.synchronizedSet(new HashSet<>());

   private AtomicBoolean shutdownInProgress = new AtomicBoolean(false);

@@ -80,9 +80,9 @@ public void initializeMemberVariables() {
     };

     // Initialize used variables
-    xmlPropsToSkipCompare = new HashSet<String>();
-    xmlPrefixToSkipCompare = new HashSet<String>();
-    configurationPropsToSkipCompare = new HashSet<String>();
+    xmlPropsToSkipCompare = new HashSet<>();
+    xmlPrefixToSkipCompare = new HashSet<>();
+    configurationPropsToSkipCompare = new HashSet<>();

     // Set error modes
     errorIfMissingConfigProps = true;

@@ -18,6 +18,8 @@
 package org.apache.hadoop.crypto.key;

 import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeoutException;
@@ -32,7 +34,6 @@
 import org.junit.Assert;
 import org.junit.Test;

-import org.apache.hadoop.util.Sets;

 public class TestValueQueue {
   Logger LOG = LoggerFactory.getLogger(TestValueQueue.class);
@@ -103,10 +104,10 @@ public void testWarmUp() throws Exception {

     Assert.assertEquals(5, fillInfos[0].num);
     Assert.assertEquals(5, fillInfos[1].num);
     Assert.assertEquals(5, fillInfos[2].num);
-    Assert.assertEquals(Sets.newHashSet("k1", "k2", "k3"),
-        Sets.newHashSet(fillInfos[0].key,
-            fillInfos[1].key,
-            fillInfos[2].key));
+    Assert.assertEquals(new HashSet<>(Arrays.asList("k1", "k2", "k3")),
+        new HashSet<>(Arrays.asList(fillInfos[0].key,
+            fillInfos[1].key,
+            fillInfos[2].key)));
     vq.shutdown();
   }

@@ -39,6 +39,8 @@
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -65,7 +67,6 @@
 import org.junit.rules.Timeout;
 import org.mockito.Mockito;

-import org.apache.hadoop.util.Sets;

 public class TestLoadBalancingKMSClientProvider {
@@ -86,8 +87,8 @@ public void testCreation() throws Exception {
     KMSClientProvider[] providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(1, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl()));
+    assertEquals(new HashSet<>(Collections.singleton("http://host1:9600/kms/foo/v1/")),
+        new HashSet<>(Collections.singleton(providers[0].getKMSUrl())));

     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -95,12 +96,12 @@ public void testCreation() throws Exception {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
-        "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
-            providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
+        "http://host2:9600/kms/foo/v1/",
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
+            providers[1].getKMSUrl(),
+            providers[2].getKMSUrl())));

     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -108,12 +109,12 @@ public void testCreation() throws Exception {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
-        "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
-            providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
+        "http://host2:9600/kms/foo/v1/",
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
+            providers[1].getKMSUrl(),
+            providers[2].getKMSUrl())));
   }

   @Test

@@ -43,9 +43,9 @@ public class TestCommandFormat {
   @Before
   public void setUp() {
-    args = new ArrayList<String>();
-    expectedOpts = new HashSet<String>();
-    expectedArgs = new ArrayList<String>();
+    args = new ArrayList<>();
+    expectedOpts = new HashSet<>();
+    expectedArgs = new ArrayList<>();
   }

   @Test
@@ -205,6 +205,6 @@ private static List<String> listOf(String ... objects) {
   }

   private static Set<String> setOf(String ... objects) {
-    return new HashSet<String>(listOf(objects));
+    return new HashSet<>(listOf(objects));
   }
 }

@@ -246,7 +246,7 @@ public void testListLocatedStatus() throws Exception {
     // test.har has the following contents:
     //   dir1/1.txt
     //   dir1/2.txt
-    Set<String> expectedFileNames = new HashSet<String>();
+    Set<String> expectedFileNames = new HashSet<>();
     expectedFileNames.add("1.txt");
     expectedFileNames.add("2.txt");

@@ -152,7 +152,7 @@ public void testDirectory() throws IOException {
     writeFile(fs, FILE1, FILE_LEN);
     writeFile(fs, FILE3, FILE_LEN);

-    Set<Path> filesToFind = new HashSet<Path>();
+    Set<Path> filesToFind = new HashSet<>();
     filesToFind.add(fs.makeQualified(FILE1));
     filesToFind.add(fs.makeQualified(FILE2));
     filesToFind.add(fs.makeQualified(FILE3));

@@ -747,7 +747,7 @@ public void testTrashEmptier() throws Exception {
     Path myPath = new Path(TEST_DIR, "test/mkdirs");
     mkdir(fs, myPath);
     int fileIndex = 0;
-    Set<String> checkpoints = new HashSet<String>();
+    Set<String> checkpoints = new HashSet<>();
     while (true) {
       // Create a file with a new name
       Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);

@@ -275,7 +275,7 @@ public void testListDirectory() throws IOException {
     File dir = new File("testListDirectory");
     Files.createDirectory(dir.toPath());
     try {
-      Set<String> entries = new HashSet<String>();
+      Set<String> entries = new HashSet<>();
       entries.add("entry1");
       entries.add("entry2");
       entries.add("entry3");

@@ -69,7 +69,7 @@ public void testCompressorNotReturnSameInstance() {
     Compressor comp = CodecPool.getCompressor(codec);
     CodecPool.returnCompressor(comp);
     CodecPool.returnCompressor(comp);
-    Set<Compressor> compressors = new HashSet<Compressor>();
+    Set<Compressor> compressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       compressors.add(CodecPool.getCompressor(codec));
     }
@@ -180,7 +180,7 @@ public void testDecompressorNotReturnSameInstance() {
     Decompressor decomp = CodecPool.getDecompressor(codec);
     CodecPool.returnDecompressor(decomp);
     CodecPool.returnDecompressor(decomp);
-    Set<Decompressor> decompressors = new HashSet<Decompressor>();
+    Set<Decompressor> decompressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       decompressors.add(CodecPool.getDecompressor(codec));
     }

@@ -39,6 +39,7 @@
 import java.util.Random;
 import java.util.Set;
 import java.util.Enumeration;
+import java.util.TreeSet;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -72,7 +73,6 @@
 import org.slf4j.LoggerFactory;

 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.util.Sets;

 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.util.functional.CommonCallableSupplier.submit;
@@ -344,13 +344,13 @@ public static void assertExists(File f) {
   public static void assertGlobEquals(File dir, String pattern,
       String ... expectedMatches) throws IOException {

-    Set<String> found = Sets.newTreeSet();
+    Set<String> found = new TreeSet<>();
     for (File f : FileUtil.listFiles(dir)) {
       if (f.getName().matches(pattern)) {
         found.add(f.getName());
       }
     }
-    Set<String> expectedSet = Sets.newTreeSet(
+    Set<String> expectedSet = new TreeSet<>(
         Arrays.asList(expectedMatches));
     Assert.assertEquals("Bad files matching " + pattern + " in " + dir,
         Joiner.on(",").join(expectedSet),

@@ -70,8 +70,8 @@ public abstract class MultithreadedTestUtil {
   public static class TestContext {
     private Throwable err = null;
     private boolean stopped = false;
-    private Set<TestingThread> testThreads = new HashSet<TestingThread>();
-    private Set<TestingThread> finishedThreads = new HashSet<TestingThread>();
+    private Set<TestingThread> testThreads = new HashSet<>();
+    private Set<TestingThread> finishedThreads = new HashSet<>();

    /**
     * Check if the context can run threads.

@@ -36,9 +36,9 @@
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalListener;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalNotification;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;

+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -56,10 +56,10 @@
  */
 public class KMSAudit {

   @VisibleForTesting
-  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = Sets.newHashSet(
+  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = new HashSet<>(Arrays.asList(
       KMS.KMSOp.GET_KEY_VERSION, KMS.KMSOp.GET_CURRENT_KEY,
       KMS.KMSOp.DECRYPT_EEK, KMS.KMSOp.GENERATE_EEK, KMS.KMSOp.REENCRYPT_EEK
-  );
+  ));

   private Cache<String, AuditEvent> cache;