diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index b8823352d0..a95d7e6917 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -77,6 +77,7 @@
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -133,6 +134,9 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
 
   private final ValueQueue<EncryptedKeyVersion> encKeyVersionQueue;
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   private class EncryptedQueueRefiller implements
     ValueQueue.QueueRefiller<EncryptedKeyVersion> {
 
@@ -226,8 +230,7 @@ public KMSEncryptedKeyVersion(String keyName, String keyVersionName,
   private static void writeJson(Object obj, OutputStream os)
       throws IOException {
     Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-    ObjectMapper jsonMapper = new ObjectMapper();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj);
+    WRITER.writeValue(writer, obj);
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
index 5ae4e8bcc5..a847d1198d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
@@ -36,6 +36,7 @@
 import javax.management.ObjectName;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang.exception.ExceptionUtils;
@@ -128,6 +129,8 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call count and
   // idx 1 for the raw call count
@@ -909,8 +912,7 @@ public String getSchedulingDecisionSummary() {
       return "{}";
     } else {
       try {
-        ObjectMapper om = new ObjectMapper();
-        return om.writeValueAsString(decisions);
+        return WRITER.writeValueAsString(decisions);
       } catch (Exception e) {
         return "Error: " + e.getMessage();
       }
@@ -919,8 +921,7 @@ public String getSchedulingDecisionSummary() {
 
   public String getCallVolumeSummary() {
     try {
-      ObjectMapper om = new ObjectMapper();
-      return om.writeValueAsString(getDecayedCallCounts());
+      return WRITER.writeValueAsString(getDecayedCallCounts());
     } catch (Exception e) {
       return "Error: " + e.getMessage();
     }
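Reviewer note, not part of the patch: the hunks in both files above rely on Jackson's ObjectWriter being immutable and therefore safe to share across threads, while constructing a fresh ObjectMapper on every call repeats non-trivial setup work each time. A minimal self-contained sketch of the idiom, with invented class and data names:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CachedWriterSketch {
      // Built once; ObjectWriter is immutable, so concurrent callers can share it.
      private static final ObjectWriter WRITER = new ObjectMapper().writer();

      public static String toJson(Map<String, Object> data) {
        try {
          return WRITER.writeValueAsString(data);
        } catch (Exception e) {
          // Mirrors the fallback style used in DecayRpcScheduler.
          return "Error: " + e.getMessage();
        }
      }

      public static void main(String[] args) {
        Map<String, Object> counts = new LinkedHashMap<>();
        counts.put("alice", 42);
        counts.put("bob", 7);
        System.out.println(toJson(counts)); // {"alice":42,"bob":7}
      }
    }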
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
index 337846cd77..a2bbbfc2d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/Log4Json.java
@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.MappingJsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.node.ContainerNode;
 import org.apache.log4j.Layout;
 import org.apache.log4j.helpers.ISO8601DateFormat;
@@ -105,6 +106,7 @@ public class Log4Json extends Layout {
    * configuration it must be done in a static intializer block.
    */
   private static final JsonFactory factory = new MappingJsonFactory();
+  private static final ObjectReader READER = new ObjectMapper(factory).reader();
   public static final String DATE = "date";
   public static final String EXCEPTION_CLASS = "exceptionclass";
   public static final String LEVEL = "level";
@@ -252,8 +254,7 @@ public void activateOptions() {
    * @throws IOException on any parsing problems
    */
   public static ContainerNode parse(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper(factory);
-    JsonNode jsonNode = mapper.readTree(json);
+    JsonNode jsonNode = READER.readTree(json);
     if (!(jsonNode instanceof ContainerNode)) {
       throw new IOException("Wrong JSON data: " + json);
     }
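Reviewer note: the Log4Json hunk keeps the class's shared MappingJsonFactory and wraps it exactly once in a static ObjectReader, which, like ObjectWriter, is immutable and thread-safe. A sketch of the same parse-and-validate flow, assuming an invented class name:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.MappingJsonFactory;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.node.ContainerNode;

    import java.io.IOException;

    public class JsonParseSketch {
      // One reader for the life of the class, reusing a single factory/mapper.
      private static final ObjectReader READER =
          new ObjectMapper(new MappingJsonFactory()).reader();

      // Parse a JSON event and insist on an object or array at the top level,
      // as Log4Json.parse does.
      public static ContainerNode parse(String json) throws IOException {
        JsonNode node = READER.readTree(json);
        if (!(node instanceof ContainerNode)) {
          throw new IOException("Wrong JSON data: " + json);
        }
        return (ContainerNode) node;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(parse("{\"level\":\"INFO\",\"message\":\"hello\"}"));
      }
    }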
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
index 8e42909b07..ce6fbe1d82 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
@@ -18,17 +18,18 @@
 
 package org.apache.hadoop.metrics2;
 
-import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
 /**
  * Build a JSON dump of the metrics.
  *
@@ -44,6 +45,9 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
+
   /**
    * Build an instance.
    * @param parent parent collector. Unused in this instance; only used for
@@ -116,7 +120,7 @@ public MetricsCollector parent() {
   @Override
   public String toString() {
     try {
-      return new ObjectMapper().writeValueAsString(innerMetrics);
+      return WRITER.writeValueAsString(innerMetrics);
     } catch (IOException e) {
       LOG.warn("Failed to dump to Json.", e);
       return ExceptionUtils.getStackTrace(e);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index 06932ac623..3dba1dbc20 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.token.delegation.web;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.security.SecurityUtil;
@@ -54,6 +55,9 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
 
   private static final String CONTENT_TYPE = "Content-Type";
   private static final String APPLICATION_JSON_MIME = "application/json";
 
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+
   private static final String HTTP_GET = "GET";
   private static final String HTTP_PUT = "PUT";
@@ -316,8 +320,7 @@ private Map doDelegationTokenOperation(URL url,
     if (contentType != null &&
         contentType.contains(APPLICATION_JSON_MIME)) {
       try {
-        ObjectMapper mapper = new ObjectMapper();
-        ret = mapper.readValue(conn.getInputStream(), Map.class);
+        ret = READER.readValue(conn.getInputStream());
       } catch (Exception ex) {
         throw new AuthenticationException(String.format(
             "'%s' did not handle the '%s' delegation token operation: %s",
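Reviewer note: readerFor(Map.class) binds the target type when the reader is built, which is why the call site can drop the Map.class argument from readValue(). An illustrative sketch; the sample JSON and names are invented, not from the patch:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;

    public class TypedReaderSketch {
      // The target type is fixed up front, so readValue() needs no class token.
      private static final ObjectReader READER =
          new ObjectMapper().readerFor(Map.class);

      public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream(
            "{\"Token\":{\"urlString\":\"abc\"}}".getBytes(StandardCharsets.UTF_8));
        Map<?, ?> json = READER.readValue(in);
        System.out.println(json.get("Token")); // {urlString=abc}
      }
    }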
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
index 52403d8717..cdb8112584 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.util;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -54,6 +56,11 @@ public class HttpExceptionUtils {
 
   private static final String ENTER = System.getProperty("line.separator");
 
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
   /**
    * Creates a HTTP servlet response serializing the exception in it as JSON.
    *
@@ -74,9 +81,8 @@ public static void createServletExceptionResponse(
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
     Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
     jsonResponse.put(ERROR_JSON, json);
-    ObjectMapper jsonMapper = new ObjectMapper();
     Writer writer = response.getWriter();
-    jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, jsonResponse);
+    WRITER.writeValue(writer, jsonResponse);
     writer.flush();
   }
 
@@ -144,8 +150,7 @@ public static void validateResponse(HttpURLConnection conn,
     InputStream es = null;
     try {
       es = conn.getErrorStream();
-      ObjectMapper mapper = new ObjectMapper();
-      Map json = mapper.readValue(es, Map.class);
+      Map json = READER.readValue(es);
       json = (Map) json.get(ERROR_JSON);
       String exClass = (String) json.get(ERROR_CLASSNAME_JSON);
       String exMsg = (String) json.get(ERROR_MESSAGE_JSON);
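Reviewer note: after these hunks, HttpExceptionUtils holds one shared Map-bound reader and one shared pretty-printing writer in place of two per-call mappers. A round-trip sketch under the same configuration, with invented field names standing in for the ERROR_* constants:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.ObjectWriter;

    import java.io.IOException;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RoundTripSketch {
      private static final ObjectReader READER =
          new ObjectMapper().readerFor(Map.class);
      private static final ObjectWriter WRITER =
          new ObjectMapper().writerWithDefaultPrettyPrinter();

      public static void main(String[] args) throws IOException {
        Map<String, Object> error = new LinkedHashMap<>();
        error.put("message", "No such key");
        error.put("exception", "IOException");

        // Serialize with the shared pretty-printing writer...
        String json = WRITER.writeValueAsString(error);
        System.out.println(json);

        // ...and read it back with the shared, Map-bound reader.
        Map<?, ?> parsed = READER.readValue(json);
        System.out.println(parsed.get("message")); // No such key
      }
    }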