HADOOP-17009: Embrace Immutability of Java Collections
parent 3472c3efc0
commit 100ec8e870
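The changes below all follow one pattern: freshly allocated mutable collections that are never modified are replaced by the immutable factories in java.util.Collections (emptyList, emptyMap, singleton, singletonMap), and snapshots handed to callers are wrapped in unmodifiable views. A minimal sketch of the pattern, purely illustrative and not part of the patch (class and method names are invented):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

// Hypothetical example class, not from the Hadoop source tree.
class ImmutabilityPattern {
  // Before: allocates a new, mutable, always-empty list on every call.
  static List<String> emptyBefore() {
    return new ArrayList<String>(0);
  }

  // After: returns the shared immutable empty list; no allocation.
  static List<String> emptyAfter() {
    return Collections.emptyList();
  }

  // Before: a HashMap built just to hold one entry.
  // After: a single-entry immutable map.
  static Map<String, String> oneEntry(String key, String value) {
    return Collections.singletonMap(key, value);
  }
}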
@@ -23,7 +23,6 @@
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
@@ -1032,7 +1031,7 @@ public String getCanonicalServiceName() {
   */
  @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-    return new ArrayList<Token<?>>(0);
+    return Collections.emptyList();
  }

  /**
@@ -20,8 +20,7 @@
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Collections;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;

@@ -65,9 +64,7 @@ public Stat(Path path, long blockSize, boolean deref, FileSystem fs)
    this.blockSize = blockSize;
    this.dereference = deref;
    // LANG = C setting
-    Map<String, String> env = new HashMap<String, String>();
-    env.put("LANG", "C");
-    setEnvironment(env);
+    setEnvironment(Collections.singletonMap("LANG", "C"));
  }

  public FileStatus getFileStatus() throws IOException {
@@ -20,6 +20,7 @@

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

@@ -97,7 +98,7 @@ protected List<PathData> expandArgument(String arg) throws IOException {
        throw e;
      }
      // prevent -f on a non-existent glob from failing
-      return new LinkedList<PathData>();
+      return Collections.emptyList();
    }
  }
@@ -19,9 +19,9 @@

import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;

import org.apache.commons.cli.Options;
@@ -107,8 +107,7 @@ protected HAAdmin(Configuration conf) {
  protected abstract HAServiceTarget resolveTarget(String string);

  protected Collection<String> getTargetIds(String targetNodeToActivate) {
-    return new ArrayList<String>(
-        Arrays.asList(new String[]{targetNodeToActivate}));
+    return Collections.singleton(targetNodeToActivate);
  }

  protected String getUsageString() {
@@ -188,8 +187,10 @@ private int transitionToActive(final CommandLine cmd)
  private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive)
      throws IOException {
    Collection<String> targetIds = getTargetIds(targetNodeToActivate);
-    targetIds.remove(targetNodeToActivate);
-    for(String targetId : targetIds) {
+    for (String targetId : targetIds) {
+      if (targetNodeToActivate.equals(targetId)) {
+        continue;
+      }
      HAServiceTarget target = resolveTarget(targetId);
      if (!checkManualStateManagementOK(target)) {
        return true;
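Note on the hunk above: getTargetIds now returns Collections.singleton(...), which is immutable, so the caller can no longer call targetIds.remove(...) and instead skips the originating node inside the loop. A small hedged sketch of the behavioural difference, illustrative only and not part of the patch:

import java.util.Collection;
import java.util.Collections;

// Hypothetical demo class.
public class SingletonIsImmutable {
  public static void main(String[] args) {
    Collection<String> targetIds = Collections.singleton("nn1");
    try {
      targetIds.remove("nn1");   // throws: singleton collections reject mutation
    } catch (UnsupportedOperationException expected) {
      System.out.println("remove() not supported on Collections.singleton");
    }
    // The replacement pattern: iterate and skip, rather than mutate.
    for (String id : targetIds) {
      if ("nn1".equals(id)) {
        continue;
      }
      System.out.println("other target: " + id);
    }
  }
}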
@@ -19,7 +19,7 @@

import java.io.IOException;
import java.security.Principal;
import java.util.HashMap;
import java.util.Collections;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@@ -121,14 +121,10 @@ public void init(FilterConfig conf) throws ServletException {

  @Override
  public void initFilter(FilterContainer container, Configuration conf) {
-    HashMap<String, String> options = new HashMap<String, String>();

    String username = getUsernameFromConf(conf);
-    options.put(HADOOP_HTTP_STATIC_USER, username);

-    container.addFilter("static_user_filter",
-        StaticUserFilter.class.getName(),
-        options);
+    container.addFilter("static_user_filter", StaticUserFilter.class.getName(),
+        Collections.singletonMap(HADOOP_HTTP_STATIC_USER, username));
  }

  /**
@@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.util;

import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.Collections;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -70,8 +70,7 @@ private MBeans() {
   */
  static public ObjectName register(String serviceName, String nameName,
      Object theMbean) {
-    return register(serviceName, nameName, new HashMap<String, String>(),
-        theMbean);
+    return register(serviceName, nameName, Collections.emptyMap(), theMbean);
  }

  /**
@@ -130,8 +130,7 @@ public List<String> resolve(List<String> names) {
   */
  @Override
  public Map<String, String> getSwitchMap() {
-    Map<String, String > switchMap = new HashMap<String, String>(cache);
-    return switchMap;
+    return new HashMap<>(cache);
  }

@@ -196,10 +196,8 @@ public List<Node> getDatanodesInRack(String loc) {
        loc = loc.substring(1);
      }
      InnerNode rack = (InnerNode) clusterMap.getLoc(loc);
-      if (rack == null) {
-        return null;
-      }
-      return new ArrayList<Node>(rack.getChildren());
+      return (rack == null) ? new ArrayList<>(0)
+          : new ArrayList<>(rack.getChildren());
    } finally {
      netlock.readLock().unlock();
    }
@@ -25,6 +25,7 @@
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -130,7 +131,7 @@ public synchronized List<String> resolve(List<String> names) {
      if (map == null) {
        LOG.warn("Failed to read topology table. " +
            NetworkTopology.DEFAULT_RACK + " will be used for all nodes.");
-        map = new HashMap<String, String>();
+        map = Collections.emptyMap();
      }
    }
    List<String> results = new ArrayList<String>(names.size());
@@ -19,6 +19,7 @@

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -69,8 +70,8 @@ public class CompositeGroupsMapping
  public synchronized List<String> getGroups(String user) throws IOException {
    Set<String> groupSet = new TreeSet<String>();

-    List<String> groups = null;
    for (GroupMappingServiceProvider provider : providersList) {
+      List<String> groups = Collections.emptyList();
      try {
        groups = provider.getGroups(user);
      } catch (Exception e) {
@@ -78,17 +79,15 @@ public synchronized List<String> getGroups(String user) throws IOException {
            user, provider.getClass().getSimpleName(), e.toString());
        LOG.debug("Stacktrace: ", e);
      }
-      if (groups != null && ! groups.isEmpty()) {
+      if (!groups.isEmpty()) {
        groupSet.addAll(groups);
        if (!combined) break;
      }
    }

-    List<String> results = new ArrayList<String>(groupSet.size());
-    results.addAll(groupSet);
-    return results;
+    return new ArrayList<>(groupSet);
  }

  /**
   * Caches groups, no need to do that for this provider
   */
@@ -20,6 +20,7 @@

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.LinkedList;

@@ -125,6 +126,6 @@ protected synchronized List<String> getUsersForNetgroup(String netgroup) {
    if (users != null && users.length != 0) {
      return Arrays.asList(users);
    }
-    return new LinkedList<String>();
+    return Collections.emptyList();
  }
}
@@ -518,11 +518,11 @@ List<String> doGetGroups(String user, int goUpHierarchy)
    if (!results.hasMoreElements()) {
      LOG.debug("doGetGroups({}) returned no groups because the " +
          "user is not found.", user);
-      return new ArrayList<>();
+      return Collections.emptyList();
    }
    SearchResult result = results.nextElement();

-    List<String> groups = null;
+    List<String> groups = Collections.emptyList();
    if (useOneQuery) {
      try {
        /**
@@ -548,7 +548,7 @@ List<String> doGetGroups(String user, int goUpHierarchy)
            "the second LDAP query using the user's DN.", e);
      }
    }
-    if (groups == null || groups.isEmpty() || goUpHierarchy > 0) {
+    if (groups.isEmpty() || goUpHierarchy > 0) {
      groups = lookupGroup(result, c, goUpHierarchy);
    }
    LOG.debug("doGetGroups({}) returned {}", user, groups);
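A recurring detail in the two group-mapping changes above: initialising a local to Collections.emptyList() instead of null lets the later null checks collapse to isEmpty() checks, and assigning a freshly looked-up list over it later is still fine because the variable, not the immutable list, is reassigned. A hedged sketch of that idiom, with invented names standing in for the provider or LDAP call:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustrative only; lookupGroups is a stand-in for the real lookup.
class EmptyListDefault {
  static List<String> lookupGroups(String user) throws Exception {
    if (user.isEmpty()) {
      throw new Exception("lookup failed");
    }
    return Arrays.asList("staff", "wheel");
  }

  static List<String> getGroups(String user) {
    List<String> groups = Collections.emptyList();  // safe default, never null
    try {
      groups = lookupGroups(user);                  // reassignment, not mutation
    } catch (Exception e) {
      // fall through with the immutable empty default
    }
    if (groups.isEmpty()) {                          // no null check needed
      return Collections.emptyList();
    }
    return groups;
  }
}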
@@ -17,9 +17,9 @@
 */
package org.apache.hadoop.security;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -61,7 +61,7 @@ public static void getNetgroups(final String user,
   * @return list of cached groups
   */
  public static List<String> getNetgroupNames() {
-    return new LinkedList<String>(getGroups());
+    return new ArrayList<>(getGroups());
  }

  private static Set<String> getGroups() {
@@ -24,6 +24,7 @@
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
@@ -534,7 +535,7 @@ synchronized private void updateMapIncr(final int id,
  static final class PassThroughMap<K> extends HashMap<K, K> {

    public PassThroughMap() {
-      this(new HashMap<K, K>());
+      this(Collections.emptyMap());
    }

    public PassThroughMap(Map<K, K> mapping) {
@@ -20,9 +20,8 @@
import java.io.IOException;
import java.io.Writer;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@@ -301,8 +300,7 @@ public boolean managementOperation(AuthenticationToken token,
          dt.decodeFromUrlString(tokenToRenew);
          long expirationTime = tokenManager.renewToken(dt,
              requestUgi.getShortUserName());
-          map = new HashMap();
-          map.put("long", expirationTime);
+          map = Collections.singletonMap("long", expirationTime);
        } catch (IOException ex) {
          throw new AuthenticationException(ex.toString(), ex);
        }
@@ -358,13 +356,11 @@ public boolean managementOperation(AuthenticationToken token,

  @SuppressWarnings("unchecked")
  private static Map delegationTokenToJSON(Token token) throws IOException {
-    Map json = new LinkedHashMap();
-    json.put(
+    Map json = Collections.singletonMap(
        KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON,
        token.encodeToUrlString());
-    Map response = new LinkedHashMap();
-    response.put(KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_JSON,
-        json);
+    Map response = Collections.singletonMap(
+        KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_JSON, json);
    return response;
  }

@@ -20,6 +20,7 @@

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -426,7 +427,7 @@ private void recordLifecycleEvent() {

  @Override
  public synchronized List<LifecycleEvent> getLifecycleHistory() {
-    return new ArrayList<LifecycleEvent>(lifecycleHistory);
+    return Collections.unmodifiableList(new ArrayList<>(lifecycleHistory));
  }

  /**
@@ -483,8 +484,7 @@ public void removeBlocker(String name) {
  @Override
  public Map<String, String> getBlockers() {
    synchronized (blockerMap) {
-      Map<String, String> map = new HashMap<String, String>(blockerMap);
-      return map;
+      return Collections.unmodifiableMap(new HashMap<>(blockerMap));
    }
  }
}
@@ -19,6 +19,7 @@
package org.apache.hadoop.service;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -60,7 +61,7 @@ public CompositeService(String name) {
   */
  public List<Service> getServices() {
    synchronized (serviceList) {
-      return new ArrayList<Service>(serviceList);
+      return Collections.unmodifiableList(new ArrayList<>(serviceList));
    }
  }

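Note on the two service hunks above: getLifecycleHistory, getBlockers and getServices still copy the internal collection while holding the lock, and now additionally wrap that copy in an unmodifiable view, so callers get a consistent snapshot they cannot mistake for a mutable handle on service state. A minimal sketch of the snapshot-and-wrap idiom, using a hypothetical class rather than the patched ones:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Hypothetical holder, not from the patch.
class EventLog {
  private final List<String> events = new ArrayList<>();

  synchronized void record(String event) {
    events.add(event);
  }

  // Copy under the lock (a consistent snapshot), then wrap so callers
  // cannot mutate what looks like internal state.
  synchronized List<String> history() {
    return Collections.unmodifiableList(new ArrayList<>(events));
  }
}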
@@ -23,6 +23,7 @@
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import com.google.common.annotations.VisibleForTesting;
@@ -894,7 +895,7 @@ public List<String> extractCommandOptions(Configuration conf,
      List<String> args) {
    int size = args.size();
    if (size <= 1) {
-      return new ArrayList<>(0);
+      return Collections.emptyList();
    }
    List<String> coreArgs = args.subList(1, size);
@@ -28,6 +28,7 @@
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.HttpURLConnection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

@@ -71,8 +72,8 @@ public static void createServletExceptionResponse(
    json.put(ERROR_MESSAGE_JSON, getOneLineMessage(ex));
    json.put(ERROR_EXCEPTION_JSON, ex.getClass().getSimpleName());
    json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
-    Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
-    jsonResponse.put(ERROR_JSON, json);
+    Map<String, Object> jsonResponse =
+        Collections.singletonMap(ERROR_JSON, json);
    Writer writer = response.getWriter();
    JsonSerialization.writer().writeValue(writer, jsonResponse);
    writer.flush();
@@ -91,8 +92,7 @@ public static Response createJerseyExceptionResponse(Response.Status status,
    json.put(ERROR_MESSAGE_JSON, getOneLineMessage(ex));
    json.put(ERROR_EXCEPTION_JSON, ex.getClass().getSimpleName());
    json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
-    Map<String, Object> response = new LinkedHashMap<String, Object>();
-    response.put(ERROR_JSON, json);
+    Map<String, Object> response = Collections.singletonMap(ERROR_JSON, json);
    return Response.status(status).type(MediaType.APPLICATION_JSON).
        entity(response).build();
  }
@@ -28,6 +28,7 @@
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
@@ -871,6 +872,7 @@ protected Shell(long interval, boolean redirectErrorStream) {
    this.interval = interval;
    this.lastTime = (interval < 0) ? 0 : -interval;
    this.redirectErrorStream = redirectErrorStream;
+    this.environment = Collections.emptyMap();
  }

  /**
@@ -878,7 +880,7 @@ protected Shell(long interval, boolean redirectErrorStream) {
   * @param env Mapping of environment variables
   */
  protected void setEnvironment(Map<String, String> env) {
-    this.environment = env;
+    this.environment = Objects.requireNonNull(env);
  }

  /**
@@ -915,9 +917,7 @@ private void runCommand() throws IOException {
      builder.environment().clear();
    }

-    if (environment != null) {
-      builder.environment().putAll(this.environment);
-    }
+    builder.environment().putAll(this.environment);

    if (dir != null) {
      builder.directory(this.dir);
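Note on the Shell hunks above: the environment field now starts as an immutable empty map and setEnvironment rejects null, which is why runCommand can drop its null guard before putAll. A hedged sketch of the same null-object style default, with placeholder names rather than the real Shell members:

import java.util.Collections;
import java.util.Map;
import java.util.Objects;

// Illustrative stand-in for the Shell pattern above.
class CommandRunner {
  private Map<String, String> environment = Collections.emptyMap();  // never null

  void setEnvironment(Map<String, String> env) {
    this.environment = Objects.requireNonNull(env);  // fail fast on null
  }

  void run(ProcessBuilder builder) {
    // No null check needed: putAll of an empty map is a harmless no-op.
    builder.environment().putAll(environment);
  }
}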