HADOOP-15550. Avoid static initialization of ObjectMappers

This commit is contained in:
Todd Lipcon 2018-06-25 15:36:45 -07:00
parent c687a6617d
commit 7a3c6e9c3c
10 changed files with 45 additions and 47 deletions

View File

@@ -42,6 +42,7 @@
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
@@ -79,7 +80,6 @@
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -132,9 +132,6 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
   private final Text dtService;
   // Allow fallback to default kms server port 9600 for certain tests that do
@@ -237,7 +234,7 @@ public KMSEncryptedKeyVersion(String keyName, String keyVersionName,
   private static void writeJson(Object obj, OutputStream os)
       throws IOException {
     Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-    WRITER.writeValue(writer, obj);
+    JsonSerialization.writer().writeValue(writer, obj);
   }
   /**
/** /**

View File

@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.security.token.delegation.web;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.net.NetUtils;
@@ -31,6 +29,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -56,9 +55,6 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
   private static final String CONTENT_TYPE = "Content-Type";
   private static final String APPLICATION_JSON_MIME = "application/json";
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
   private static final String HTTP_GET = "GET";
   private static final String HTTP_PUT = "PUT";
@@ -328,7 +324,7 @@ private Map doDelegationTokenOperation(URL url,
     if (contentType != null &&
         contentType.contains(APPLICATION_JSON_MIME)) {
       try {
-        ret = READER.readValue(conn.getInputStream());
+        ret = JsonSerialization.mapReader().readValue(conn.getInputStream());
       } catch (Exception ex) {
         throw new AuthenticationException(String.format(
             "'%s' did not handle the '%s' delegation token operation: %s",

View File

@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.util;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
-import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -56,11 +53,6 @@ public class HttpExceptionUtils {
   private static final String ENTER = System.getProperty("line.separator");
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
   /**
    * Creates a HTTP servlet response serializing the exception in it as JSON.
    *
@@ -82,7 +74,7 @@ public static void createServletExceptionResponse(
     Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
     jsonResponse.put(ERROR_JSON, json);
     Writer writer = response.getWriter();
-    WRITER.writeValue(writer, jsonResponse);
+    JsonSerialization.writer().writeValue(writer, jsonResponse);
     writer.flush();
   }
@@ -150,7 +142,7 @@ public static void validateResponse(HttpURLConnection conn,
       InputStream es = null;
       try {
         es = conn.getErrorStream();
-        Map json = READER.readValue(es);
+        Map json = JsonSerialization.mapReader().readValue(es);
         json = (Map) json.get(ERROR_JSON);
         String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
         String exMsg = (String) json.get(ERROR_MESSAGE_JSON);

View File

@@ -25,14 +25,18 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Map;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -65,6 +69,26 @@ public class JsonSerialization<T> {
   private final Class<T> classType;
   private final ObjectMapper mapper;
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader MAP_READER =
+      new ObjectMapper().readerFor(Map.class);
+
+  /**
+   * @return an ObjectWriter which pretty-prints its output
+   */
+  public static ObjectWriter writer() {
+    return WRITER;
+  }
+
+  /**
+   * @return an ObjectReader which returns simple Maps.
+   */
+  public static ObjectReader mapReader() {
+    return MAP_READER;
+  }
   /**
    * Create an instance bound to a specific type.
    * @param classType class to marshall

View File

@@ -17,10 +17,9 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.http.JettyUtils;
+import org.apache.hadoop.util.JsonSerialization;
 import javax.ws.rs.Produces;
 import javax.ws.rs.WebApplicationException;
@@ -67,8 +66,7 @@ public void writeTo(Object obj, Class<?> aClass, Type type,
       OutputStream outputStream) throws IOException, WebApplicationException {
     Writer writer = new OutputStreamWriter(outputStream, Charset
         .forName("UTF-8"));
-    ObjectMapper jsonMapper = new ObjectMapper();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj);
+    JsonSerialization.writer().writeValue(writer, obj);
   }
 }

View File

@@ -56,8 +56,6 @@
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.input.BoundedInputStream;
 import org.apache.hadoop.conf.Configuration;
@@ -121,6 +119,7 @@
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.StringUtils;
@@ -172,8 +171,6 @@ public class WebHdfsFileSystem extends FileSystem
   private boolean disallowFallbackToInsecureCluster;
   private String restCsrfCustomHeader;
   private Set<String> restCsrfMethodsToIgnore;
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
   private DFSOpsCountStatistics storageStatistics;
@@ -476,7 +473,7 @@ private Path makeAbsolute(Path f) {
             + "\" (parsed=\"" + parsed + "\")");
       }
     }
-    return READER.readValue(in);
+    return JsonSerialization.mapReader().readValue(in);
   } finally {
     in.close();
   }

View File

@@ -18,8 +18,6 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import com.squareup.okhttp.OkHttpClient;
 import com.squareup.okhttp.Request;
 import com.squareup.okhttp.RequestBody;
@@ -28,6 +26,7 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
@@ -55,8 +54,6 @@
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";
@@ -129,7 +126,8 @@ void refresh() throws IOException {
           + responseBody.code() + ", text = " + responseBody.toString());
     }
-    Map<?, ?> response = READER.readValue(responseBody.body().string());
+    Map<?, ?> response = JsonSerialization.mapReader().readValue(
+        responseBody.body().string());
     String newExpiresIn = response.get(EXPIRES_IN).toString();
     accessTokenTimer.setExpiresIn(newExpiresIn);

View File

@@ -18,8 +18,6 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import com.squareup.okhttp.OkHttpClient;
 import com.squareup.okhttp.Request;
 import com.squareup.okhttp.RequestBody;
@@ -28,6 +26,7 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
@@ -55,8 +54,6 @@
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";
@@ -123,7 +120,8 @@ void refresh() throws IOException {
           + responseBody.code() + ", text = " + responseBody.toString());
     }
-    Map<?, ?> response = READER.readValue(responseBody.body().string());
+    Map<?, ?> response = JsonSerialization.mapReader().readValue(
+        responseBody.body().string());
     String newExpiresIn = response.get(EXPIRES_IN).toString();
     timer.setExpiresIn(newExpiresIn);

View File

@@ -34,8 +34,6 @@
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -61,6 +59,7 @@
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ReservationId;
@@ -71,8 +70,6 @@
 class JobSubmitter {
   protected static final Logger LOG =
       LoggerFactory.getLogger(JobSubmitter.class);
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(Map.class);
   private static final String SHUFFLE_KEYGEN_ALGORITHM = "HmacSHA1";
   private static final int SHUFFLE_KEY_LENGTH = 64;
   private FileSystem jtFs;
@@ -406,7 +403,8 @@ private void readTokensFromFiles(Configuration conf, Credentials credentials)
       try {
         // read JSON
-        Map<String, String> nm = READER.readValue(new File(localFileName));
+        Map<String, String> nm = JsonSerialization.mapReader().readValue(
+            new File(localFileName));
         for(Map.Entry<String, String> ent: nm.entrySet()) {
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()

View File

@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.azure.security;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.util.JsonSerialization;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -37,8 +38,7 @@ private JsonUtils() {
   public static Map<?, ?> parse(final String jsonString) throws IOException {
     try {
-      ObjectMapper mapper = new ObjectMapper();
-      return mapper.readerFor(Map.class).readValue(jsonString);
+      return JsonSerialization.mapReader().readValue(jsonString);
     } catch (Exception e) {
       LOG.debug("JSON Parsing exception: {} while parsing {}", e.getMessage(),
           jsonString);