diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 94285a4dfb..4f372374ab 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -22,7 +22,6 @@
 import com.ctc.wstx.io.StreamBootstrapper;
 import com.ctc.wstx.io.SystemId;
 import com.ctc.wstx.stax.WstxInputFactory;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
 import java.io.BufferedInputStream;
@@ -101,6 +100,7 @@ import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.util.ConfigurationHelper;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringInterner;
@@ -3792,8 +3792,7 @@ public static void dumpConfiguration(Configuration config,
       throw new IllegalArgumentException("Property " +
           propertyName + " not found");
     } else {
-      JsonFactory dumpFactory = new JsonFactory();
-      JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+      JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
       dumpGenerator.writeStartObject();
       dumpGenerator.writeFieldName("property");
       appendJSONProperty(dumpGenerator, config, propertyName,
@@ -3831,8 +3830,7 @@ public static void dumpConfiguration(Configuration config,
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
     dumpGenerator.writeStartObject();
     dumpGenerator.writeFieldName("properties");
     dumpGenerator.writeStartArray();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 6ee9068ea3..b5a6d88233 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
@@ -78,7 +79,6 @@ import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -592,11 +592,10 @@ private <T> T call(HttpURLConnection conn, Object jsonOutput,
         && conn.getContentType().trim().toLowerCase()
             .startsWith(APPLICATION_JSON_MIME)
         && klass != null) {
-      ObjectMapper mapper = new ObjectMapper();
       InputStream is = null;
       try {
         is = conn.getInputStream();
-        ret = mapper.readValue(is, klass);
+        ret = JacksonUtil.getSharedReader().readValue(is, klass);
       } finally {
         IOUtils.closeStream(is);
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
index 63274bb01e..4d7cd023b5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
@@ -38,10 +38,10 @@
 import javax.management.ObjectName;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call cost and
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 0d9e7296d2..a808f07b0c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -121,6 +121,7 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
@@ -130,7 +131,6 @@ import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
 import org.apache.hadoop.tracing.TraceUtils;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -3843,9 +3843,8 @@ public int getNumOpenConnections() {
    * @return Get the NumOpenConnections/User.
    */
   public String getNumOpenConnectionsPerUser() {
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper
+      return JacksonUtil.getSharedWriter()
           .writeValueAsString(connectionManager.getUserToConnectionsMap());
     } catch (IOException ignored) {
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
index f089db5027..6f54364fff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
@@ -43,13 +43,13 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.util.JacksonUtil;
 
 /*
  * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@@ -134,11 +134,6 @@ public class JMXJsonServlet extends HttpServlet {
    */
   protected transient MBeanServer mBeanServer;
 
-  /**
-   * Json Factory to create Json generators for write objects in json format
-   */
-  protected transient JsonFactory jsonFactory;
-
   /**
    * Initialize this servlet.
    */
@@ -146,7 +141,6 @@ public void init() throws ServletException {
     // Retrieve the MBean server
     mBeanServer = ManagementFactory.getPlatformMBeanServer();
-    jsonFactory = new JsonFactory();
   }
 
   protected boolean isInstrumentationAccessAllowed(HttpServletRequest request,
@@ -187,7 +181,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
       response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
       response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
 
-      jg = jsonFactory.createGenerator(writer);
+      jg = JacksonUtil.getSharedWriter().createGenerator(writer);
       jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
       jg.useDefaultPrettyPrinter();
       jg.writeStartObject();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
index 3a9be12803..3534adfd69 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
@@ -21,8 +21,8 @@
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.JacksonUtil;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,8 +46,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   /**
    * Build an instance.
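The hunks above all make the same substitution: a throwaway `new JsonFactory()` or `new ObjectMapper()` per call is replaced by a shared `ObjectWriter` from `JacksonUtil`, with only the short-lived `JsonGenerator` created per call. For illustration, a minimal self-contained sketch of the new pattern (the class and method names below are hypothetical, not part of this patch):

```java
import java.io.IOException;
import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonGenerator;

import org.apache.hadoop.util.JacksonUtil;

public class SharedGeneratorSketch {
  public static String dumpAsJson() throws IOException {
    StringWriter out = new StringWriter();
    // The shared ObjectWriter is reused across calls; the generator it
    // hands out is cheap, single-use, and must be closed by the caller.
    JsonGenerator jg = JacksonUtil.getSharedWriter().createGenerator(out);
    jg.writeStartObject();
    jg.writeStringField("name", "dfs.replication");
    jg.writeNumberField("value", 3);
    jg.writeEndObject();
    jg.close();
    return out.toString();
  }

  public static void main(String[] args) throws IOException {
    // Expected output: {"name":"dfs.replication","value":3}
    System.out.println(dumpAsJson());
  }
}
```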
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index f4ede6f35e..62c7c4ba6e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -165,7 +166,7 @@ public void initTokenManager(Properties config) {
   @VisibleForTesting
   public void initJsonFactory(Properties config) {
     boolean hasFeature = false;
-    JsonFactory tmpJsonFactory = new JsonFactory();
+    JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory();
 
     for (Map.Entry entry : config.entrySet()) {
       String key = (String)entry.getKey();
@@ -335,7 +336,7 @@ public boolean managementOperation(AuthenticationToken token,
         if (map != null) {
           response.setContentType(MediaType.APPLICATION_JSON);
           Writer writer = response.getWriter();
-          ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
+          ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory);
           jsonMapper.writeValue(writer, map);
           writer.write(ENTER);
           writer.flush();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java
new file mode 100644
index 0000000000..7d90555c87
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+
+/**
+ * Utility for sharing code related to Jackson usage in Hadoop.
+ */
+@Private
+public final class JacksonUtil {
+
+  private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper();
+  private static final ObjectReader SHARED_BASIC_OBJECT_READER =
+      SHARED_BASIC_OBJECT_MAPPER.reader();
+  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER =
+      SHARED_BASIC_OBJECT_MAPPER.writer();
+  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY =
+      SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter();
+
+  /**
+   * Creates a new {@link JsonFactory} instance with basic configuration.
+   *
+   * @return a {@link JsonFactory} with basic configuration
+   */
+  public static JsonFactory createBasicJsonFactory() {
+    // deliberately return a new instance instead of sharing one because we can't trust
+    // that users won't modify this instance
+    return new JsonFactory();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance with basic configuration.
+   *
+   * @return an {@link ObjectMapper} with basic configuration
+   */
+  public static ObjectMapper createBasicObjectMapper() {
+    // deliberately return a new instance instead of sharing one because we can't trust
+    // that users won't modify this instance
+    return JsonMapper.builder(createBasicJsonFactory()).build();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance based on the configuration
+   * in the input {@link JsonFactory}.
+   *
+   * @param jsonFactory a pre-configured {@link JsonFactory}
+   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
+   */
+  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
+    return JsonMapper.builder(jsonFactory).build();
+  }
+
+  /**
+   * Returns a shared {@link ObjectReader} instance with basic configuration.
+   *
+   * @return a shared {@link ObjectReader} instance with basic configuration
+   */
+  public static ObjectReader getSharedReader() {
+    return SHARED_BASIC_OBJECT_READER;
+  }
+
+  /**
+   * Returns an {@link ObjectReader} for the given type with basic configuration.
+   *
+   * @param type the class that the reader has to support
+   * @return an {@link ObjectReader} instance with basic configuration
+   */
+  public static ObjectReader createBasicReaderFor(Class<?> type) {
+    return SHARED_BASIC_OBJECT_MAPPER.readerFor(type);
+  }
+
+  /**
+   * Returns a shared {@link ObjectWriter} instance with basic configuration.
+   *
+   * @return a shared {@link ObjectWriter} instance with basic configuration
+   */
+  public static ObjectWriter getSharedWriter() {
+    return SHARED_BASIC_OBJECT_WRITER;
+  }
+
+  /**
+   * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration.
+   *
+   * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration
+   */
+  public static ObjectWriter getSharedWriterWithPrettyPrint() {
+    return SHARED_BASIC_OBJECT_WRITER_PRETTY;
+  }
+
+  /**
+   * Returns an {@link ObjectWriter} for the given type with basic configuration.
+   *
+   * @param type the class that the writer has to support
+   * @return an {@link ObjectWriter} instance with basic configuration
+   */
+  public static ObjectWriter createBasicWriterFor(Class<?> type) {
+    return SHARED_BASIC_OBJECT_MAPPER.writerFor(type);
+  }
+
+  private JacksonUtil() {}
+}
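Since `JacksonUtil` is the core of this change, a short usage sketch may help reviewers. It assumes only the API added above; the surrounding class is hypothetical:

```java
import java.util.Collections;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;

import org.apache.hadoop.util.JacksonUtil;

public class JacksonUtilSketch {
  // The shared reader/writer instances are immutable, so caching them in
  // static fields and using them from many threads is safe.
  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
  private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);

  public static void main(String[] args) throws Exception {
    String json = WRITER.writeValueAsString(Collections.singletonMap("key", "value"));
    Map<?, ?> parsed = MAP_READER.readValue(json);
    System.out.println(parsed.get("key")); // value

    // Call sites that need non-default configuration create a private
    // mapper instead of mutating a shared one.
    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    System.out.println(mapper.writeValueAsString(parsed));
  }
}
```

This split is the design point of the utility: shared, immutable reader/writer objects for the common case, and deliberately fresh `JsonFactory`/`ObjectMapper` instances whenever the caller might reconfigure them.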
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
index 52c6c45052..05b069c3ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
@@ -76,11 +76,8 @@ public class JsonSerialization<T> {
   private final Class<T> classType;
   private final ObjectMapper mapper;
 
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
-
-  private static final ObjectReader MAP_READER =
-      new ObjectMapper().readerFor(Map.class);
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
+  private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);
 
   /**
    * @return an ObjectWriter which pretty-prints its output
@@ -106,7 +103,7 @@ public JsonSerialization(Class<T> classType,
       boolean failOnUnknownProperties, boolean pretty) {
     Preconditions.checkArgument(classType != null, "null classType");
     this.classType = classType;
-    this.mapper = new ObjectMapper();
+    this.mapper = JacksonUtil.createBasicObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
         failOnUnknownProperties);
     mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty);
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java
index af781f5277..2f7a6d8557 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java
@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.JacksonUtil;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.WebApplicationException;
@@ -38,7 +37,6 @@
 @Consumes(MediaType.APPLICATION_JSON)
 @InterfaceAudience.Private
 public class KMSJSONReader implements MessageBodyReader<Object> {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
 
   @Override
   public boolean isReadable(Class<?> type, Type genericType,
@@ -52,6 +50,6 @@ public Object readFrom(Class<Object> type, Type genericType,
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
       throws IOException, WebApplicationException {
-    return MAPPER.readValue(entityStream, type);
+    return JacksonUtil.getSharedReader().readValue(entityStream, type);
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
index d1ad5a2079..041eb2912b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
@@ -20,8 +20,8 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -35,9 +35,8 @@
 @InterfaceStability.Unstable
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class DiskBalancerWorkItem {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerWorkItem.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class);
 
   private long startTime;
   private long secondsElapsed;
@@ -173,7 +172,7 @@ public void incBlocksCopied() {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    return MAPPER.writeValueAsString(this);
+    return JacksonUtil.getSharedWriter().writeValueAsString(this);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
index 5a5da7326a..7ea6e9d885 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -39,14 +40,13 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskBalancerWorkStatus {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
   private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
-      new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+      JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
   private static final ObjectReader READER_WORKSTATUS =
-      new ObjectMapper().readerFor(DiskBalancerWorkStatus.class);
-  private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
-      .readerFor(defaultInstance().constructCollectionType(List.class,
-          DiskBalancerWorkEntry.class));
+      MAPPER.readerFor(DiskBalancerWorkStatus.class);
+  private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor(
+      defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class));
 
   private final List<DiskBalancerWorkEntry> currentState;
   private Result result;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index 33f4934e54..a41b727ab2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -18,9 +18,7 @@
 package org.apache.hadoop.hdfs.util;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import java.io.File;
@@ -42,6 +40,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -83,7 +82,6 @@ private CombinedHostsFileReader() {
   public static DatanodeAdminProperties[]
       readFile(final String hostsFilePath) throws IOException {
     DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
-    ObjectMapper objectMapper = new ObjectMapper();
     File hostFile = new File(hostsFilePath);
     boolean tryOldFormat = false;
 
@@ -91,7 +89,8 @@ private CombinedHostsFileReader() {
     try (Reader input =
         new InputStreamReader(
             Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
-      allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
+      allDNs = JacksonUtil.getSharedReader()
+          .readValue(input, DatanodeAdminProperties[].class);
     } catch (JsonMappingException jme) {
       // The old format doesn't have json top-level token to enclose
       // the array.
@@ -103,15 +102,12 @@ private CombinedHostsFileReader() {
     }
 
     if (tryOldFormat) {
-      ObjectReader objectReader =
-          objectMapper.readerFor(DatanodeAdminProperties.class);
-      JsonFactory jsonFactory = new JsonFactory();
+      ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class);
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
           new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
              StandardCharsets.UTF_8)) {
-        Iterator<DatanodeAdminProperties> iterator =
-            objectReader.readValues(jsonFactory.createParser(input));
+        Iterator<DatanodeAdminProperties> iterator = objectReader.readValues(input);
         while (iterator.hasNext()) {
           DatanodeAdminProperties properties = iterator.next();
           all.add(properties);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
index de4c12d556..dcd08cfc70 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
@@ -26,11 +26,11 @@
 import java.nio.file.Paths;
 import java.util.Set;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Writer support for JSON-based datanode configuration, an alternative format
@@ -59,12 +59,10 @@ private CombinedHostsFileWriter() {
    */
   public static void writeFile(final String hostsFile,
       final Set<DatanodeAdminProperties> allDNs) throws IOException {
-    final ObjectMapper objectMapper = new ObjectMapper();
-
     try (Writer output =
         new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
            StandardCharsets.UTF_8)) {
-      objectMapper.writeValue(output, allDNs);
+      JacksonUtil.getSharedWriter().writeValue(output, allDNs);
     }
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index 108f74997a..54a44b33b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
@@ -654,7 +654,7 @@ static List<String> toXAttrNames(final Map<?, ?> json)
     }
 
     final String namesInJson = (String) json.get("XAttrNames");
-    ObjectReader reader = new ObjectMapper().readerFor(List.class);
+    ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class);
     final List<Object> xattrs = reader.readValue(namesInJson);
     final List<String> names =
         Lists.newArrayListWithCapacity(json.keySet().size());
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index dab4776575..1ec907004b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -71,6 +71,7 @@
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1818,7 +1819,7 @@ public Collection<FileStatus> getTrashRoots(boolean allUsers) {
 
   @VisibleForTesting
   static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
         String.class, BlockLocation[].class);
     MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,
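Note the simplification in the old-format branch of `CombinedHostsFileReader`: `readValues(...)` now consumes the `Reader` directly and iterates a sequence of root-level JSON values, with no hand-built parser. A sketch of that behavior, where `HostEntry` is a hypothetical stand-in for `DatanodeAdminProperties`:

```java
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.fasterxml.jackson.databind.ObjectReader;

import org.apache.hadoop.util.JacksonUtil;

public class ReadValuesSketch {
  // Hypothetical POJO; public fields are enough for Jackson's defaults.
  public static class HostEntry {
    public String hostName;
    public int port;
  }

  public static void main(String[] args) throws Exception {
    // Old format: bare JSON objects concatenated without an enclosing array.
    String oldFormat =
        "{\"hostName\":\"dn1\",\"port\":9866} {\"hostName\":\"dn2\",\"port\":9866}";
    ObjectReader reader = JacksonUtil.createBasicReaderFor(HostEntry.class);
    List<HostEntry> all = new ArrayList<>();
    // readValues() yields one mapped value per root-level JSON object.
    Iterator<HostEntry> it = reader.readValues(new StringReader(oldFormat));
    while (it.hasNext()) {
      all.add(it.next());
    }
    System.out.println(all.size()); // 2
  }
}
```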
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java
index 798b5fb596..312d63daed 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java
@@ -21,7 +21,6 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
@@ -32,6 +31,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
@@ -71,7 +71,7 @@ public class SlowDiskTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   /**
    * Number of disks to include in JSON report per operation. We will return
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
index e4feb4815e..3774a9dbdf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
@@ -30,6 +29,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics;
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,7 +75,8 @@ public class SlowPeerTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+
   /**
    * Number of nodes to include in JSON report. We will return nodes with
    * the highest number of votes from peers.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
index 6b026823f1..080418db08 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
@@ -79,18 +79,18 @@
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
+import org.apache.hadoop.util.Preconditions;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -103,10 +103,9 @@ public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
 
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(BlockIteratorState.class);
+      JacksonUtil.createBasicReaderFor(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
index 69a4625731..816a765c52 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
@@ -32,7 +32,6 @@
 import java.util.concurrent.atomic.AtomicLong;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -60,6 +59,7 @@
 import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
@@ -369,7 +369,7 @@ public void releaseReservedSpace(long bytesToRelease) {
   }
 
   private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
+      JacksonUtil.getSharedWriterWithPrettyPrint();
 
   private static class ProvidedBlockIteratorState {
     ProvidedBlockIteratorState() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
index c90b77e98d..e9ba658ecd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.command;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.commons.cli.CommandLine;
@@ -47,6 +46,7 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,8 +77,7 @@
  * Common interface for command handling.
  */
 public abstract class Command extends Configured implements Closeable {
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(HashMap.class);
+  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class);
   static final Logger LOG = LoggerFactory.getLogger(Command.class);
   private Map<String, String> validArgs = new HashMap<>();
   private URI clusterURI;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
index 1cc82253f9..4e76c7e45e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
@@ -17,15 +17,14 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
-import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
-    .DiskBalancerDataNode;
+import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
 
 import java.io.File;
 import java.net.URL;
@@ -38,7 +37,7 @@ public class JsonNodeConnector implements ClusterConnector {
   private static final Logger LOG =
       LoggerFactory.getLogger(JsonNodeConnector.class);
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerCluster.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
   private final URL clusterURI;
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
index 7e935a3f82..f24f92ff13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
@@ -19,9 +19,7 @@
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.Preconditions;
 
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -31,6 +29,8 @@
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.JacksonUtil;
+import org.apache.hadoop.util.Preconditions;
 
 import java.io.File;
 import java.io.IOException;
@@ -73,7 +73,7 @@ public class DiskBalancerCluster {
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerCluster.class);
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerCluster.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
   private final Set<String> exclusionList;
   private final Set<String> inclusionList;
   private ClusterConnector clusterConnector;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
index e43b83e39c..e354a23519 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
@@ -19,10 +19,10 @@
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,7 +34,7 @@
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class DiskBalancerVolume {
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerVolume.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class);
 
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerVolume.class);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java
index 39a7c57bca..3dfd27dde4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java
@@ -18,9 +18,9 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.planner;
 
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 
 import java.io.IOException;
@@ -39,10 +39,8 @@ public class NodePlan {
   private int port;
   private long timeStamp;
 
-  private static final ObjectMapper MAPPER = new ObjectMapper();
-  private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
-  private static final ObjectWriter WRITER = MAPPER.writerFor(
-      MAPPER.constructType(NodePlan.class));
+  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class);
+  private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class);
 
   /**
    * returns timestamp when this plan was created.
   *
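`NodePlan` now caches a per-type reader and writer derived from the shared mapper rather than building its own `ObjectMapper`. The same caching pattern, sketched with a hypothetical POJO:

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import org.apache.hadoop.util.JacksonUtil;

/** Hypothetical POJO illustrating the NodePlan caching pattern. */
public class Plan {
  // Built once from the shared mapper; both objects are immutable and
  // pre-resolve serializers/deserializers for this type.
  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(Plan.class);
  private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(Plan.class);

  public String nodeName;
  public int port;

  public String toJson() throws IOException {
    return WRITER.writeValueAsString(this);
  }

  public static Plan parseJson(String json) throws IOException {
    return READER.readValue(json);
  }
}
```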
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java
index a646028083..16d9e203d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -26,6 +25,7 @@
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.ServletContext;
@@ -123,8 +123,7 @@ protected void printTopology(PrintStream stream, List<Node> leaves,
   protected void printJsonFormat(PrintStream stream, Map<String,
       TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream);
     dumpGenerator.writeStartArray();
 
     for(String r : racks) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
index 449a1aa62a..17cd49c2d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
@@ -21,7 +21,6 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
@@ -29,6 +28,7 @@
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.JacksonUtil;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
@@ -61,7 +61,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
         getServletContext());
     StartupProgressView view = prog.createView();
-    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
+    JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter());
     try {
       json.writeStartObject();
       json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index 5f90404ebe..1ec6730bb8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -38,13 +39,12 @@
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import java.io.IOException;
 import java.util.*;
 
@@ -52,11 +52,11 @@
 public class JsonUtil {
   private static final Object[] EMPTY_OBJECT_ARRAY = {};
 
-  // Reuse ObjectMapper instance for improving performance.
-  // ObjectMapper is thread safe as long as we always configure instance
+  // Reuse ObjectWriter instance for improving performance.
+  // ObjectWriter is thread safe as long as we always configure instance
   // before use. We don't have a re-entrant call pattern in WebHDFS,
   // so we just need to worry about thread-safety.
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();
 
   /** Convert a token object to a Json string. */
   public static String toJsonString(final Token<? extends TokenIdentifier> token
@@ -93,7 +93,7 @@ public static String toJsonString(final String key, final Object value) {
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put(key, value);
     try {
-      return MAPPER.writeValueAsString(m);
+      return SHARED_WRITER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -113,7 +113,7 @@ public static String toJsonString(final HdfsFileStatus status,
     final Map<String, Object> m = toJsonMap(status);
     try {
       return includeType ?
          toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -453,7 +453,7 @@ public static String toJsonString(final AclStatus status) {
     finalMap.put(AclStatus.class.getSimpleName(), m);
 
     try {
-      return MAPPER.writeValueAsString(finalMap);
+      return SHARED_WRITER.writeValueAsString(finalMap);
     } catch (IOException ignored) {
     }
     return null;
@@ -491,7 +491,7 @@ public static String toJsonString(final List<XAttr> xAttrs,
       final XAttrCodec encoding) throws IOException {
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
-    return MAPPER.writeValueAsString(finalMap);
+    return SHARED_WRITER.writeValueAsString(finalMap);
   }
 
   public static String toJsonString(final List<XAttr> xAttrs)
@@ -500,14 +500,14 @@ public static String toJsonString(final List<XAttr> xAttrs)
     for (XAttr xAttr : xAttrs) {
       names.add(XAttrHelper.getPrefixedName(xAttr));
     }
-    String ret = MAPPER.writeValueAsString(names);
+    String ret = SHARED_WRITER.writeValueAsString(names);
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrNames", ret);
-    return MAPPER.writeValueAsString(finalMap);
+    return SHARED_WRITER.writeValueAsString(finalMap);
   }
 
   public static String toJsonString(Object obj) throws IOException {
-    return MAPPER.writeValueAsString(obj);
+    return SHARED_WRITER.writeValueAsString(obj);
   }
 
   public static String toJsonString(BlockStoragePolicy[] storagePolicies) {
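The rewritten comment in `JsonUtil` leans on the fact that an `ObjectWriter` is immutable and therefore safe to share across threads without locking. An illustrative sketch of concurrent use (not part of the patch):

```java
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.fasterxml.jackson.databind.ObjectWriter;

import org.apache.hadoop.util.JacksonUtil;

public class SharedWriterThreadingSketch {
  // One writer, many threads: safe because ObjectWriter carries no
  // mutable per-call state.
  private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();

  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (int i = 0; i < 8; i++) {
      final int id = i;
      pool.submit(() -> {
        Map<String, Object> m = new TreeMap<>();
        m.put("call", id);
        try {
          System.out.println(SHARED_WRITER.writeValueAsString(m));
        } catch (Exception e) {
          e.printStackTrace();
        }
      });
    }
    pool.shutdown();
  }
}
```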
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
index ec43bce678..3a44b42792 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapred;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,6 +27,7 @@
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -531,8 +531,7 @@ static void dumpConfiguration(Writer out, String configFile,
       return;
     }
 
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
     QueueConfigurationParser parser;
     boolean aclsEnabled = false;
     if (conf != null) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
index b5c8b1178d..c7cd7a63a8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 
 /**
@@ -41,7 +42,7 @@ private JobHistoryEventUtils() {
   public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
 
   public static JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     ArrayNode nodes = mapper.createArrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java
index a720d2ca10..00692abcf1 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java
@@ -22,7 +22,6 @@
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +29,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.S3AUtils;
+import org.apache.hadoop.util.JacksonUtil;
 
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;
 
@@ -91,8 +91,8 @@ public static String getS3EncryptionContextBase64Encoded(
       if (encryptionContextMap.isEmpty()) {
         return "";
       }
-      final String encryptionContextJson = new ObjectMapper().writeValueAsString(
-          encryptionContextMap);
+      final String encryptionContextJson = JacksonUtil.getSharedWriter()
+          .writeValueAsString(encryptionContextMap);
       return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8));
     } catch (IOException e) {
       if (propagateExceptions) {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
index 4e777da8b4..2b59452a32 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
@@ -84,6 +84,7 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.LambdaUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
@@ -96,7 +97,6 @@
 import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 import com.microsoft.azure.storage.StorageException;
@@ -127,7 +127,7 @@ public static class FolderRenamePending {
     private static final int FORMATTING_BUFFER = 10000;
     private boolean committed;
     public static final String SUFFIX = "-RenamePending.json";
-    private static final ObjectReader READER = new ObjectMapper()
+    private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper()
         .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
         .readerFor(JsonNode.class);
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java
index 473fa54f97..3f8862e6d1 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java
@@ -24,11 +24,11 @@
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.http.NameValuePair;
@@ -40,7 +40,7 @@
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 
 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
 
@@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl {
   public static final Logger LOG =
       LoggerFactory.getLogger(AzureNativeFileSystemStore.class);
 
-  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
-      .readerFor(RemoteSASKeyGenerationResponse.class);
+  private static final ObjectReader RESPONSE_READER = JacksonUtil
+      .createBasicReaderFor(RemoteSASKeyGenerationResponse.class);
 
   /**
    * Configuration parameter name expected in the Configuration
implements WasbAuthorizerInterface { public static final Logger LOG = LoggerFactory .getLogger(RemoteWasbAuthorizerImpl.class); - private static final ObjectReader RESPONSE_READER = new ObjectMapper() - .readerFor(RemoteWasbAuthorizerResponse.class); + private static final ObjectReader RESPONSE_READER = JacksonUtil + .createBasicReaderFor(RemoteWasbAuthorizerResponse.class); /** * Configuration parameter name expected in the Configuration object to @@ -176,7 +176,7 @@ private boolean authorizeInternal(String wasbAbsolutePath, String accessType, St uriBuilder .addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath); uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType); - if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) { + if (StringUtils.isNotEmpty(resourceOwner)) { uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME, resourceOwner); } diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java index dab4d79658..ab0282e19f 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java @@ -29,9 +29,6 @@ import java.util.Hashtable; import java.util.Map; -import org.apache.hadoop.util.Preconditions; - -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import org.slf4j.Logger; @@ -42,6 +39,8 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils; import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy; +import org.apache.hadoop.util.JacksonUtil; +import org.apache.hadoop.util.Preconditions; /** * This class provides convenience methods to obtain AAD tokens. @@ -493,8 +492,7 @@ private static AzureADToken parseTokenFromStream( int expiryPeriodInSecs = 0; long expiresOnInSecs = -1; - JsonFactory jf = new JsonFactory(); - JsonParser jp = jf.createParser(httpResponseStream); + JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream); String fieldName, fieldValue; jp.nextToken(); while (jp.hasCurrentToken()) { diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java index e2ce5c628a..658f2cfe65 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java @@ -30,7 +30,6 @@ import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,6 +39,7 @@ import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable; import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema; import org.apache.hadoop.fs.azurebfs.utils.UriUtils; +import org.apache.hadoop.util.JacksonUtil; /** * Base Http operation class for orchestrating server IO calls. 
Child classes would @@ -447,7 +447,7 @@ private void processStorageErrorResponse() { if (stream == null) { return; } - JsonFactory jf = new JsonFactory(); + JsonFactory jf = JacksonUtil.createBasicJsonFactory(); try (JsonParser jp = jf.createParser(stream)) { String fieldName, fieldValue; jp.nextToken(); // START_OBJECT - { @@ -509,8 +509,7 @@ private void parseListFilesResponse(final InputStream stream) } try { - final ObjectMapper objectMapper = new ObjectMapper(); - this.listResultSchema = objectMapper.readValue(stream, + this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream, ListResultSchema.class); } catch (IOException ex) { log.error("Unable to deserialize list results", ex); diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java index f6c8a6ac4d..04e98754ca 100644 --- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java +++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; @@ -484,7 +485,7 @@ static Set parseStaleDataNodeList(String liveNodeJsonString, final int blockThreshold, final Logger log) throws IOException { final Set dataNodesToReport = new HashSet<>(); - JsonFactory fac = new JsonFactory(); + JsonFactory fac = JacksonUtil.createBasicJsonFactory(); JsonParser parser = fac.createParser(IOUtils .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name())); @@ -554,7 +555,7 @@ static String fetchNameNodeJMXValue(Properties nameNodeProperties, "Unable to retrieve JMX: " + conn.getResponseMessage()); } InputStream in = conn.getInputStream(); - JsonFactory fac = new JsonFactory(); + JsonFactory fac = JacksonUtil.createBasicJsonFactory(); JsonParser parser = fac.createParser(in); if (parser.nextToken() != JsonToken.START_OBJECT || parser.nextToken() != JsonToken.FIELD_NAME diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java index 3c85a93ddb..dc0856cd58 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java @@ -22,7 +22,6 @@ import java.io.OutputStream; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.ObjectMapper; @@ -36,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.mapreduce.ID; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.tools.rumen.datatypes.*; @@ -55,8 +55,7 @@ public class Anonymizer 
extends Configured implements Tool { private StatePool statePool; private ObjectMapper outMapper = null; - private JsonFactory outFactory = null; - + private void initialize(String[] args) throws Exception { try { for (int i = 0; i < args.length; ++i) { @@ -85,7 +84,7 @@ private void initialize(String[] args) throws Exception { // initialize the state manager after the anonymizers are registered statePool.initialize(getConf()); - outMapper = new ObjectMapper(); + outMapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule( "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -104,8 +103,6 @@ private void initialize(String[] args) throws Exception { // register the module with the object-mapper outMapper.registerModule(module); - - outFactory = outMapper.getFactory(); } // anonymize the job trace file @@ -191,7 +188,7 @@ private JsonGenerator createJsonGenerator(Configuration conf, Path path) } JsonGenerator outGen = - outFactory.createGenerator(output, JsonEncoding.UTF8); + outMapper.createGenerator(output, JsonEncoding.UTF8); outGen.useDefaultPrettyPrinter(); return outGen; diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java index f95878dde9..3d644b5ad2 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.util.JacksonUtil; /** * A simple wrapper for parsing JSON-encoded data using ObjectMapper. @@ -48,10 +49,10 @@ class JsonObjectMapperParser implements Closeable { */ public JsonObjectMapperParser(Path path, Class clazz, Configuration conf) throws IOException { - mapper = new ObjectMapper(); + mapper = JacksonUtil.createBasicObjectMapper(); this.clazz = clazz; InputStream input = new PossiblyDecompressedInputStream(path, conf); - jsonParser = mapper.getFactory().createParser(input); + jsonParser = mapper.createParser(input); } /** @@ -62,9 +63,9 @@ public JsonObjectMapperParser(Path path, Class clazz, */ public JsonObjectMapperParser(InputStream input, Class clazz) throws IOException { - mapper = new ObjectMapper(); + mapper = JacksonUtil.createBasicObjectMapper(); this.clazz = clazz; - jsonParser = mapper.getFactory().createParser(input); + jsonParser = mapper.createParser(input); } /** diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java index 747b141fd9..e0caa18fff 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java @@ -30,6 +30,7 @@ import org.apache.hadoop.tools.rumen.datatypes.DataType; import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer; import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer; +import org.apache.hadoop.util.JacksonUtil; /** * Simple wrapper around {@link JsonGenerator} to write objects in JSON format. 
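The Rumen wrappers here (JsonObjectMapperParser above, JsonObjectMapperWriter below) swap mapper.getFactory().createParser(...) and createGenerator(...) for the equivalent convenience methods on ObjectMapper itself. A minimal sketch of that equivalence, assuming Jackson 2.11+ where ObjectMapper exposes createParser/createGenerator directly; the class name is illustrative, not part of this patch:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CreateParserEquivalenceSketch {
  public static void main(String[] args) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    byte[] json = "{\"counter\":42}".getBytes(StandardCharsets.UTF_8);

    // Old style: reach into the mapper's JsonFactory explicitly.
    try (InputStream in = new ByteArrayInputStream(json);
         JsonParser viaFactory = mapper.getFactory().createParser(in)) {
      JsonNode node = mapper.readTree(viaFactory);
      System.out.println(node.get("counter")); // 42
    }

    // New style: the convenience method delegates to the same factory,
    // and the returned parser is already bound to the mapper as its codec.
    try (InputStream in = new ByteArrayInputStream(json);
         JsonParser direct = mapper.createParser(in)) {
      JsonNode node = mapper.readTree(direct);
      System.out.println(node.get("counter")); // 42
    }
  }
}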
@@ -39,7 +40,7 @@ public class JsonObjectMapperWriter implements Closeable { private JsonGenerator writer; public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule( @@ -53,7 +54,7 @@ public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws I // register the module with the object-mapper mapper.registerModule(module); - writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8); + writer = mapper.createGenerator(output, JsonEncoding.UTF8); if (prettyPrint) { writer.useDefaultPrettyPrinter(); } diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java index ab6f8942e7..0c594afc3b 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java @@ -30,7 +30,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.Version; @@ -44,6 +43,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.Anonymizer; import org.apache.hadoop.tools.rumen.datatypes.DataType; +import org.apache.hadoop.util.JacksonUtil; /** * A pool of states. States used by {@link DataType}'s can be managed the @@ -206,7 +206,7 @@ private boolean reloadState(Path stateFile, Configuration configuration) } private void read(DataInput in) throws IOException { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -216,7 +216,7 @@ private void read(DataInput in) throws IOException { // register the module with the object-mapper mapper.registerModule(module); - JsonParser parser = mapper.getFactory().createParser((InputStream)in); + JsonParser parser = mapper.createParser((InputStream)in); StatePool statePool = mapper.readValue(parser, StatePool.class); this.setStates(statePool.getStates()); parser.close(); @@ -273,7 +273,7 @@ public void persist() throws IOException { private void write(DataOutput out) throws IOException { // This is just a JSON experiment System.out.println("Dumping the StatePool's in JSON format."); - ObjectMapper outMapper = new ObjectMapper(); + ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper(); // define a module SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL", "", "")); @@ -283,9 +283,8 @@ private void write(DataOutput out) throws IOException { // register the module with the object-mapper outMapper.registerModule(module); - JsonFactory outFactory = outMapper.getFactory(); JsonGenerator jGen = - outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8); + outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8); jGen.useDefaultPrettyPrinter(); jGen.writeObject(this); diff --git a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java 
b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java index 187251900b..db6d47cf07 100644 --- a/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java +++ b/hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java @@ -23,7 +23,6 @@ import java.util.List; import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; @@ -141,9 +140,8 @@ public static void main(String[] args) throws IOException { Path goldFilePath = new Path(filePath.getParent(), "gold"+testName); ObjectMapper mapper = new ObjectMapper(); - JsonFactory factory = mapper.getFactory(); FSDataOutputStream ostream = lfs.create(goldFilePath, true); - JsonGenerator gen = factory.createGenerator((OutputStream)ostream, + JsonGenerator gen = mapper.createGenerator((OutputStream)ostream, JsonEncoding.UTF8); gen.useDefaultPrettyPrinter(); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java index 2dc09de665..0d943471c6 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java @@ -16,13 +16,13 @@ package org.apache.hadoop.yarn.sls; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.JobTraceReader; import org.apache.hadoop.tools.rumen.LoggedJob; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ReservationId; @@ -44,11 +44,8 @@ import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -122,15 +119,14 @@ public void startAM() throws YarnException, IOException { * Parse workload from a SLS trace file. */ private void startAMFromSLSTrace(String inputTrace) throws IOException { - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); try (Reader input = new InputStreamReader( new FileInputStream(inputTrace), StandardCharsets.UTF_8)) { JavaType type = mapper.getTypeFactory(). 
constructMapType(Map.class, String.class, String.class); Iterator<Map<String, String>> jobIter = mapper.readValues( - jsonF.createParser(input), type); + mapper.createParser(input), type); while (jobIter.hasNext()) { try { diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java index 2cdfe236c4..9b25275912 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java @@ -35,7 +35,6 @@ import java.util.TreeMap; import java.util.TreeSet; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.commons.cli.CommandLine; @@ -44,6 +43,7 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.sls.utils.SLSUtils; @Private @@ -126,10 +126,10 @@ private static void generateSLSLoadFile(String inputFile, String outputFile) StandardCharsets.UTF_8)) { try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter(); Iterator<Map> i = mapper.readValues( - new JsonFactory().createParser(input), Map.class); + mapper.createParser(input), Map.class); while (i.hasNext()) { Map m = i.next(); output.write(writer.writeValueAsString(createSLSJob(m)) + EOL); @@ -143,7 +143,7 @@ private static void generateSLSNodeFile(String outputFile) throws IOException { try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter(); for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) { Map rack = new LinkedHashMap(); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java index 18b1c034bd..58f8b59ba6 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.tools.rumen.JobStory; import org.apache.hadoop.tools.rumen.JobStoryProducer; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator; @@ -88,7 +89,8 @@ public SynthTraceJobProducer(Configuration conf, Path path) JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build()); + + ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); 
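Both AMRunner and RumenToSLSConverter consume a trace as a stream of concatenated JSON objects via ObjectMapper.readValues(parser, type), which hands back one decoded object per iteration rather than requiring the file to be a single JSON array. A self-contained sketch of that read loop, assuming Jackson 2.11+ for mapper.createParser; the inlined trace contents are illustrative only:

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;

import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SlsTraceReadSketch {
  public static void main(String[] args) throws IOException {
    // An SLS trace is a sequence of JSON objects, not one enclosing array.
    Reader input = new StringReader(
        "{\"job.id\":\"job_1\"}\n{\"job.id\":\"job_2\"}");

    ObjectMapper mapper = new ObjectMapper(); // stand-in for the JacksonUtil mapper
    JavaType type = mapper.getTypeFactory()
        .constructMapType(Map.class, String.class, String.class);

    // readValues returns a MappingIterator; each next() decodes one object.
    Iterator<Map<String, String>> jobIter =
        mapper.readValues(mapper.createParser(input), type);
    while (jobIter.hasNext()) {
      System.out.println(jobIter.next().get("job.id"));
    }
  }
}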
FileSystem ifs = path.getFileSystem(conf); diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java index af0b4f6caf..676ef13b5a 100644 --- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java +++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java @@ -34,7 +34,6 @@ import java.util.Map; import java.util.Set; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -45,6 +44,7 @@ import org.apache.hadoop.tools.rumen.LoggedJob; import org.apache.hadoop.tools.rumen.LoggedTask; import org.apache.hadoop.tools.rumen.LoggedTaskAttempt; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceInformation; @@ -120,12 +120,11 @@ public static Set parseNodesFromRumenTrace( public static Set parseNodesFromSLSTrace( String jobTrace) throws IOException { Set nodeSet = new HashSet<>(); - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); Reader input = new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8); try { - Iterator i = mapper.readValues(jsonF.createParser(input), Map.class); + Iterator i = mapper.readValues(mapper.createParser(input), Map.class); while (i.hasNext()) { addNodes(nodeSet, i.next()); } @@ -167,12 +166,11 @@ private static void addNodes(Set nodeSet, public static Set parseNodesFromNodeFile( String nodeFile, Resource nmDefaultResource) throws IOException { Set nodeSet = new HashSet<>(); - JsonFactory jsonF = new JsonFactory(); - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); Reader input = new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8); try { - Iterator i = mapper.readValues(jsonF.createParser(input), Map.class); + Iterator i = mapper.readValues(mapper.createParser(input), Map.class); while (i.hasNext()) { Map jsonE = i.next(); String rack = "/" + jsonE.get("rack"); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java index dd12a10f94..f690808f8e 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java @@ -18,6 +18,7 @@ package org.apache.hadoop.yarn.sls; import org.apache.commons.math3.random.JDKRandomGenerator; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.sls.synthetic.SynthJob; @@ -60,7 +61,7 @@ public void testWorkloadGenerateTime() JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build()); + ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); 
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); SynthTraceJobProducer.Workload wl = mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class); @@ -181,7 +182,7 @@ public void testSample() throws IOException { JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder(); jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true); - ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build()); + ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build()); mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false); JDKRandomGenerator rand = new JDKRandomGenerator(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java index ac8dbbac61..ea7a0ecdef 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java @@ -28,6 +28,7 @@ import java.util.Properties; import java.util.Random; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.appcatalog.model.AppEntry; import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry; import org.apache.hadoop.yarn.appcatalog.model.Application; @@ -57,6 +58,18 @@ public class AppCatalogSolrClient { private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class); private static String urlString; + /** + * It is more performant to reuse ObjectMapper instances. Keeping the instance + * private makes it harder for someone to reconfigure it, which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + public AppCatalogSolrClient() { // Locate Solr URL ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); @@ -146,8 +159,6 @@ public List search(String keyword) { public List listAppEntries() { List list = new ArrayList(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); @@ -164,7 +175,7 @@ public List listAppEntries() { entry.setId(d.get("id").toString()); entry.setName(d.get("name_s").toString()); entry.setApp(d.get("app_s").toString()); - entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(), + entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class)); list.add(entry); } @@ -176,8 +187,6 @@ public List listAppEntries() { public AppStoreEntry findAppStoreEntry(String id) { AppStoreEntry entry = new AppStoreEntry(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); @@ -197,7 +206,7 @@ public AppStoreEntry findAppStoreEntry(String id) { entry.setDesc(d.get("desc_s").toString()); entry.setLike(Integer.parseInt(d.get("like_i").toString())); entry.setDownload(Integer.parseInt(d.get("download_i").toString())); - Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(), + Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class); String name; try { @@ -222,9 +231,6 @@ public AppStoreEntry findAppStoreEntry(String id) { public AppEntry findAppEntry(String id) { AppEntry entry = new AppEntry(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - SolrClient solr = getSolrClient(); SolrQuery query = new SolrQuery(); query.setQuery("id:" + id); @@ -240,7 +246,7 @@ public AppEntry findAppEntry(String id) { entry.setId(d.get("id").toString()); entry.setApp(d.get("app_s").toString()); entry.setName(d.get("name_s").toString()); - entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(), + entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(), Service.class)); } } catch (SolrServerException | IOException e) { @@ -252,8 +258,6 @@ public AppEntry findAppEntry(String id) { public void deployApp(String id, Service service) throws SolrServerException, IOException { long download = 0; - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Collection docs = new HashSet(); SolrClient solr = getSolrClient(); // Find application information from AppStore @@ -287,7 +291,7 @@ public void deployApp(String id, Service service) throws SolrServerException, request.addField("id", name); request.addField("name_s", name); request.addField("app_s", entry.getOrg()+"/"+entry.getName()); - request.addField("yarnfile_s", mapper.writeValueAsString(service)); + request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service)); docs.add(request); } @@ -326,8 +330,6 @@ public void deleteApp(String id) { public void register(Application app) throws IOException { Collection docs = new HashSet(); SolrClient solr = getSolrClient(); - ObjectMapper mapper = new 
ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { SolrInputDocument buffer = new SolrInputDocument(); buffer.setField("id", java.util.UUID.randomUUID().toString() @@ -343,10 +345,10 @@ public void register(Application app) throws IOException { buffer.setField("download_i", 0); // Keep only YARN data model for yarnfile field - String yarnFile = mapper.writeValueAsString(app); - LOG.info("app:"+yarnFile); - Service yarnApp = mapper.readValue(yarnFile, Service.class); - buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp)); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); + LOG.info("app:{}", yarnFile); + Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class); + buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp)); docs.add(buffer); commitSolrChanges(solr, docs); @@ -359,8 +361,6 @@ public void register(Application app) throws IOException { protected void register(AppStoreEntry app) throws IOException { Collection docs = new HashSet(); SolrClient solr = getSolrClient(); - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); try { SolrInputDocument buffer = new SolrInputDocument(); buffer.setField("id", java.util.UUID.randomUUID().toString() @@ -376,10 +376,10 @@ protected void register(AppStoreEntry app) throws IOException { buffer.setField("download_i", app.getDownload()); // Keep only YARN data model for yarnfile field - String yarnFile = mapper.writeValueAsString(app); - LOG.info("app:"+yarnFile); - Service yarnApp = mapper.readValue(yarnFile, Service.class); - buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp)); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); + LOG.info("app:{}", yarnFile); + Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class); + buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp)); docs.add(buffer); commitSolrChanges(solr, docs); @@ -391,8 +391,6 @@ protected void register(AppStoreEntry app) throws IOException { public void upgradeApp(Service service) throws IOException, SolrServerException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Collection docs = new HashSet(); SolrClient solr = getSolrClient(); if (service!=null) { @@ -420,7 +418,7 @@ public void upgradeApp(Service service) throws IOException, request.addField("id", name); request.addField("name_s", name); request.addField("app_s", app); - request.addField("yarnfile_s", mapper.writeValueAsString(service)); + request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service)); docs.add(request); } try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java index 185b1c8dde..57c4b353d0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java @@ -23,6 +23,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.appcatalog.model.AppEntry; import org.apache.hadoop.yarn.service.api.records.Service; import org.apache.hadoop.yarn.service.api.records.ServiceState; @@ -46,6 +47,19 @@ public class YarnServiceClient { private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class); + + /** + * It is more performant to reuse ObjectMapper instances. Keeping the instance + * private makes it harder for someone to reconfigure it, which might have unwanted + * side effects. + */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + private static Configuration conf = new Configuration(); private static ClientConfig getClientConfig() { ClientConfig config = new DefaultClientConfig(); @@ -66,8 +80,6 @@ public YarnServiceClient() { } public void createApp(Service app) { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); ClientResponse response; try { boolean useKerberos = UserGroupInformation.isSecurityEnabled(); @@ -90,7 +102,7 @@ public void createApp(Service app) { app.setKerberosPrincipal(kerberos); } response = asc.getApiClient().post(ClientResponse.class, - mapper.writeValueAsString(app)); + OBJECT_MAPPER.writeValueAsString(app)); if (response.getStatus() >= 299) { String message = response.getEntity(String.class); throw new RuntimeException("Failed : HTTP error code : " @@ -119,10 +131,8 @@ public void deleteApp(String appInstanceId) { } public void restartApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) @@ -139,10 +149,8 @@ public void stopApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) @@ -159,14 +167,12 @@ public void stopApp(Service app) throws JsonProcessingException { } public void getStatus(AppEntry entry) { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = entry.getName(); Service app = null; try { String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId)) .get(String.class); - app = mapper.readValue(yarnFile, Service.class); + app = OBJECT_MAPPER.readValue(yarnFile, Service.class); entry.setYarnfile(app); } catch (UniformInterfaceException | IOException e) { 
LOG.error("Error in fetching application status: ", e); @@ -174,11 +180,9 @@ public void getStatus(AppEntry entry) { } public void upgradeApp(Service app) throws JsonProcessingException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); String appInstanceId = app.getName(); app.setState(ServiceState.EXPRESS_UPGRADING); - String yarnFile = mapper.writeValueAsString(app); + String yarnFile = OBJECT_MAPPER.writeValueAsString(app); ClientResponse response; try { response = asc.getApiClient(asc.getServicePath(appInstanceId)) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java index cab4870493..1e30fbd5ba 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java @@ -26,6 +26,7 @@ import org.apache.hadoop.registry.client.binding.RegistryPathUtils; import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; @@ -875,7 +876,7 @@ public void updateContainerStatus(ContainerStatus status) { doRegistryUpdate = false; } } - ObjectMapper mapper = new ObjectMapper(); + final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); try { Map<String, List<Map<String, String>>> ports = null; ports = mapper.readValue(status.getExposedPorts(), diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java index 254d6c5d37..cf3d785a22 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java @@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,9 +62,10 @@ public class JsonSerDeser<T> { @SuppressWarnings("deprecation") public JsonSerDeser(Class<T> classType) { this.classType = classType; - this.mapper = new ObjectMapper(); + this.mapper = JacksonUtil.createBasicObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false); + 
mapper.configure(SerializationFeature.INDENT_OUTPUT, true); } public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) { @@ -231,7 +233,6 @@ private void writeJsonAsBytes(T instance, * @throws JsonProcessingException parse problems */ public String toJson(T instance) throws JsonProcessingException { - mapper.configure(SerializationFeature.INDENT_OUTPUT, true); return mapper.writeValueAsString(instance); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java index e7ec2d6f5e..ac30480fd8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.service.exceptions.BadConfigException; import java.io.IOException; @@ -41,6 +42,18 @@ @JsonInclude(value = JsonInclude.Include.NON_NULL) public class PublishedConfiguration { + /** + * It is more performant to reuse ObjectMapper instances. Keeping the instance + * private makes it harder for someone to reconfigure it, which might have unwanted + * side effects. 
+ */ + private static final ObjectMapper OBJECT_MAPPER; + + static { + OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); + OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true); + } + public String description; public long updated; @@ -154,9 +167,7 @@ public Properties asProperties() { * @throws IOException marshalling failure */ public String asJson() throws IOException { - ObjectMapper mapper = new ObjectMapper(); - mapper.configure(SerializationFeature.INDENT_OUTPUT, true); - String json = mapper.writeValueAsString(entries); + String json = OBJECT_MAPPER.writeValueAsString(entries); return json; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java index b92f4e4123..dc60f9b274 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java @@ -49,6 +49,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.Time; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -60,7 +61,6 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.util.MinimalPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; @@ -274,7 +274,7 @@ public void flush() throws IOException { } private ObjectMapper createObjectMapper() { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); mapper.setAnnotationIntrospector( new JaxbAnnotationIntrospector(TypeFactory.defaultInstance())); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); @@ -365,8 +365,8 @@ public long getLastModifiedTime() { protected void prepareForWrite() throws IOException{ this.stream = createLogFileStream(fs, logPath); - this.jsonGenerator = new JsonFactory().createGenerator( - (OutputStream)stream); + this.jsonGenerator = JacksonUtil.getSharedWriter() + .createGenerator((OutputStream)stream); this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n")); this.lastModifiedTime = Time.monotonicNow(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java index 45da0f444b..0264e40c7b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java @@ -30,6 +30,7 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import 
org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; +import org.apache.hadoop.util.JacksonUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -62,7 +63,7 @@ public class TimelineClientImpl extends TimelineClient { private static final Logger LOG = LoggerFactory.getLogger(TimelineClientImpl.class); - private static final ObjectMapper MAPPER = new ObjectMapper(); + private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper(); private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/"; private static Options opts; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java index 6351cb69c8..83b6a09607 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java @@ -27,9 +27,9 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier; -import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -96,9 +96,8 @@ public static Credentials readCredentialsFromConfigFile(Path configFile, } // Parse the JSON and create the Tokens/Credentials. 
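DockerClientConfigHandler, just below, builds a JsonParser from the basic mapper and reads the docker client config into a JsonNode tree before walking it. A small sketch of that tree-walking pattern; the JSON shape is illustrative (the real input is the docker client's config.json), assuming Jackson 2.11+ for mapper.createParser:

import java.io.IOException;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DockerConfigTreeSketch {
  public static void main(String[] args) throws IOException {
    // Typical shape: "auths" maps a registry URL to a base64 auth entry.
    String contents =
        "{\"auths\":{\"https://index.docker.io/v1/\":{\"auth\":\"bXl1c2VyOnNlY3JldA==\"}}}";

    ObjectMapper mapper = new ObjectMapper(); // stand-in for the basic mapper
    JsonParser parser = mapper.createParser(contents);
    JsonNode rootNode = mapper.readTree(parser);

    // Iterate registries the same way the handler builds one token per registry.
    JsonNode auths = rootNode.path("auths");
    auths.fieldNames().forEachRemaining(registryUrl ->
        System.out.println(registryUrl + " -> "
            + auths.path(registryUrl).path("auth").asText()));
  }
}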
- ObjectMapper mapper = new ObjectMapper(); - JsonFactory factory = mapper.getFactory(); - JsonParser parser = factory.createParser(contents); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); + JsonParser parser = mapper.createParser(contents); JsonNode rootNode = mapper.readTree(parser); Credentials credentials = new Credentials(); @@ -161,7 +160,7 @@ public static boolean writeDockerCredentialsToPath(File outConfigFile, Credentials credentials) throws IOException { boolean foundDockerCred = false; if (credentials.numberOfTokens() > 0) { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JacksonUtil.createBasicObjectMapper(); ObjectNode rootNode = mapper.createObjectNode(); ObjectNode registryUrlNode = mapper.createObjectNode(); for (Token tk : credentials.getAllTokens()) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java index 14b9b0ceb7..a36b96dca2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java @@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.VersionInfo; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout; @@ -53,11 +54,10 @@ public class TimelineUtils { "TIMELINE_FLOW_RUN_ID_TAG"; public final static String DEFAULT_FLOW_VERSION = "1"; - private static ObjectMapper mapper; + private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); static { - mapper = new ObjectMapper(); - YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER); } /** @@ -90,9 +90,9 @@ public static String dumpTimelineRecordtoJSON(Object o) public static String dumpTimelineRecordtoJSON(Object o, boolean pretty) throws JsonGenerationException, JsonMappingException, IOException { if (pretty) { - return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o); + return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o); } else { - return mapper.writeValueAsString(o); + return OBJECT_MAPPER.writeValueAsString(o); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java index ad80a2eefe..bf5500892d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java @@ -28,8 +28,8 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.webapp.view.DefaultPage; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,7 +42,6 @@ 
@InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"}) public abstract class Controller implements Params { public static final Logger LOG = LoggerFactory.getLogger(Controller.class); - static final ObjectMapper jsonMapper = new ObjectMapper(); @RequestScoped public static class RequestContext{ @@ -225,7 +224,7 @@ protected void renderJSON(Object object) { context().rendered = true; context().response.setContentType(MimeType.JSON); try { - jsonMapper.writeValue(writer(), object); + JacksonUtil.getSharedWriter().writeValue(writer(), object); } catch (Exception e) { throw new WebAppException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java index fdafcf0cd1..440c5d6f06 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java @@ -19,11 +19,11 @@ import java.io.IOException; -import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.util.JacksonUtil; /** * A utility class providing methods for serializing and deserializing @@ -38,14 +38,8 @@ public class GenericObjectMapper { private static final byte[] EMPTY_BYTES = new byte[0]; - public static final ObjectReader OBJECT_READER; - public static final ObjectWriter OBJECT_WRITER; - - static { - ObjectMapper mapper = new ObjectMapper(); - OBJECT_READER = mapper.reader(Object.class); - OBJECT_WRITER = mapper.writer(); - } + public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class); + public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter(); /** * Serializes an Object into a byte array. 
Along with {@link #read(byte[])}, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java index 794ef9d9a4..cbbc33706d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java @@ -43,6 +43,7 @@ import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord; @@ -135,7 +136,7 @@ public class AuxServices extends AbstractService this.dirsHandler = nmContext.getLocalDirsHandler(); this.delService = deletionService; this.userUGI = getRemoteUgi(); - this.mapper = new ObjectMapper(); + this.mapper = JacksonUtil.createBasicObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); // Obtain services from configuration in init() } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java index cc2ded4422..3b4e26eda1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java @@ -28,11 +28,11 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; @@ -58,9 +58,8 @@ public void initialize(Configuration conf) { + " we have to set the configuration:" + YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH); } - ObjectMapper mapper = new ObjectMapper(); try { - networkTagMapping = mapper.readValue(new File(mappingJsonFile), + networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile), 
NetworkTagMapping.class); } catch (Exception e) { throw new YarnRuntimeException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java index 2c327c04eb..86bb5113dd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java @@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.JacksonUtil; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -91,6 +92,7 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL; import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*; + /** *
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
index 2c327c04eb..86bb5113dd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -91,6 +92,7 @@
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
 import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
+
 /**
  * This class is an extension of {@link OCIContainerRuntime} that uses the
  * native {@code container-executor} binary via a
@@ -206,7 +208,7 @@ public void initialize(Configuration configuration, Context nmCtx)
     imageTagToManifestPlugin.init(conf);
     manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
     manifestToResourcesPlugin.init(conf);
-    mapper = new ObjectMapper();
+    mapper = JacksonUtil.createBasicObjectMapper();
     defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);
 
     allowedNetworks.clear();
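Note that RuncContainerRuntime keeps a private mapper via createBasicObjectMapper() rather than using a shared one. ObjectMapper, unlike its reader/writer views, is mutable, so call sites that tweak configuration need their own instance. A sketch of the hazard this split avoids (class name invented for illustration):

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.hadoop.util.JacksonUtil;

    class PrivateMapperDemo {
      // Safe: the configuration change is confined to this instance. Applied
      // to a JVM-wide shared ObjectMapper, the same call would silently
      // change JSON handling for every other caller in the process.
      private final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();

      PrivateMapperDemo() {
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
      }
    }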
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
index 457939c9a1..bb21c45f73 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 
 import java.io.BufferedReader;
@@ -42,7 +43,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,7 +65,6 @@ public class ImageTagToManifestPlugin extends AbstractService
     implements RuncImageTagToManifestPlugin {
 
   private Map<String, ImageManifest> manifestCache;
-  private ObjectMapper objMapper;
   private AtomicReference<Map<String, String>> localImageToHashCache =
       new AtomicReference<>(new HashMap<>());
   private AtomicReference<Map<String, String>> hdfsImageToHashCache =
@@ -107,7 +106,7 @@ public ImageManifest getManifestFromImageTag(String imageTag)
       }
 
       byte[] bytes = IOUtils.toByteArray(input);
-      manifest = objMapper.readValue(bytes, ImageManifest.class);
+      manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class);
 
       manifestCache.put(hash, manifest);
       return manifest;
@@ -279,7 +278,6 @@ protected void serviceInit(Configuration configuration) throws Exception {
         DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
     int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
         DEFAULT_NUM_MANIFESTS_TO_CACHE);
-    this.objMapper = new ObjectMapper();
     this.manifestCache = Collections.synchronizedMap(
         new LRUCache(numManifestsToCache, 0.75f));
 
@@ -315,7 +313,7 @@ protected void serviceStop() throws Exception {
   }
 
   private static class LRUCache extends LinkedHashMap<String, ImageManifest> {
-    private int cacheSize;
+    private final int cacheSize;
 
     LRUCache(int initialCapacity, float loadFactor) {
       super(initialCapacity, loadFactor, true);
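The LRUCache touched in the last hunk is the standard access-ordered LinkedHashMap idiom: the three-argument constructor enables access ordering, and removeEldestEntry evicts once the map outgrows its budget. A generic, self-contained sketch of that idiom (illustrative names and capacity, not the plugin's actual code):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class LruCache<K, V> extends LinkedHashMap<K, V> {
      private final int cacheSize;

      LruCache(int cacheSize) {
        super(16, 0.75f, true); // accessOrder=true: get() refreshes recency
        this.cacheSize = cacheSize;
      }

      @Override
      protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > cacheSize; // evict the least-recently-used entry
      }
    }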
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
index 24cb34327b..8910ab48dd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
@@ -20,10 +20,10 @@
 import org.apache.hadoop.classification.VisibleForTesting;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -105,8 +105,7 @@ private void loadProfiles() throws IOException {
         resourcesFile = tmp.getPath();
       }
     }
-    ObjectMapper mapper = new ObjectMapper();
-    Map data = mapper.readValue(new File(resourcesFile), Map.class);
+    Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class);
     Iterator iterator = data.entrySet().iterator();
    while (iterator.hasNext()) {
       Map.Entry entry = (Map.Entry) iterator.next();
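loadProfiles() still reads into a raw Map, which is why the loop below it needs unchecked Map.Entry casts. The shared reader could produce a typed map instead; a hedged sketch of that alternative (invented file name; this is not what the patch does):

    import com.fasterxml.jackson.core.type.TypeReference;
    import java.io.File;
    import java.io.IOException;
    import java.util.Map;
    import org.apache.hadoop.util.JacksonUtil;

    class TypedProfilesDemo {
      static Map<String, Object> load() throws IOException {
        // forType() binds the reader to the generic type, avoiding raw casts.
        return JacksonUtil.getSharedReader()
            .forType(new TypeReference<Map<String, Object>>() { })
            .readValue(new File("resource-profiles.json"));
      }
    }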
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java
index 174577099e..0fa10570d0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java
@@ -27,6 +27,7 @@
 import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions;
@@ -43,7 +44,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 public class MappingRuleCreator {
@@ -58,14 +58,12 @@ public MappingRulesDescription getMappingRulesFromJsonFile(String filePath)
 
   MappingRulesDescription getMappingRulesFromJson(byte[] contents)
       throws IOException {
-    ObjectMapper objectMapper = new ObjectMapper();
-    return objectMapper.readValue(contents, MappingRulesDescription.class);
+    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
   }
 
   MappingRulesDescription getMappingRulesFromJson(String contents)
       throws IOException {
-    ObjectMapper objectMapper = new ObjectMapper();
-    return objectMapper.readValue(contents, MappingRulesDescription.class);
+    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
   }
 
   public List<MappingRule> getMappingRulesFromFile(String jsonPath)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java
index 108d52bc40..6c963775be 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java
@@ -21,6 +21,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath;
@@ -52,9 +53,11 @@ public class LegacyMappingRuleToJson {
   public static final String JSON_NODE_MATCHES = "matches";
 
   /**
-   * Our internal object mapper, used to create JSON nodes.
+   * It is more performant to reuse ObjectMapper instances, and keeping the
+   * instance private makes it harder for anyone to reconfigure it, which
+   * could have unwanted side effects.
    */
-  private ObjectMapper objectMapper = new ObjectMapper();
+  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
 
   /**
    * Collection to store the legacy group mapping rule strings.
@@ -138,8 +141,8 @@ public LegacyMappingRuleToJson setAppNameMappingRules(
    */
   public String convert() {
     //creating the basic JSON config structure
-    ObjectNode rootNode = objectMapper.createObjectNode();
-    ArrayNode rulesNode = objectMapper.createArrayNode();
+    ObjectNode rootNode = OBJECT_MAPPER.createObjectNode();
+    ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode();
     rootNode.set("rules", rulesNode);
 
     //Processing and adding all the user group mapping rules
@@ -158,7 +161,7 @@ public String convert() {
     }
 
     try {
-      return objectMapper
+      return OBJECT_MAPPER
           .writerWithDefaultPrettyPrinter()
           .writeValueAsString(rootNode);
     } catch (JsonProcessingException e) {
@@ -246,7 +249,7 @@ private String[] splitRule(String rule, int expectedParts) {
    * @return The object node with the preset fields
    */
   private ObjectNode createDefaultRuleNode(String type) {
-    return objectMapper
+    return OBJECT_MAPPER
         .createObjectNode()
         .put("type", type)
         //All legacy rule fallback to place to default
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
index d801652377..6a16aac686 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -55,7 +56,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -327,14 +327,14 @@ private void performRuleConversion(FairScheduler fs)
     placementConverter.convertPlacementPolicy(placementManager,
         ruleHandler, capacitySchedulerConfig, usePercentages);
 
-    ObjectMapper mapper = new ObjectMapper();
+    final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     // close output stream if we write to a file, leave it open otherwise
     if (!consoleMode && rulesToFile) {
       mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true);
     } else {
       mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
     }
-    ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());
+    ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
 
     if (consoleMode && rulesToFile) {
       System.out.println("======= " + MAPPING_RULES_JSON + " =======");
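The converter hunk swaps writer(new DefaultPrettyPrinter()) for the fluent writerWithDefaultPrettyPrinter(); both attach a DefaultPrettyPrinter, so the output is unchanged and the explicit import can be dropped. A short equivalence sketch (class name invented for illustration):

    import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import java.util.Collections;
    import org.apache.hadoop.util.JacksonUtil;

    class PrettyPrinterDemo {
      static void demo() throws Exception {
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        ObjectWriter explicit = mapper.writer(new DefaultPrettyPrinter());
        ObjectWriter fluent = mapper.writerWithDefaultPrettyPrinter();
        // Both writers produce identically indented output.
        Object value = Collections.singletonMap("rules", "...");
        assert explicit.writeValueAsString(value)
            .equals(fluent.writeValueAsString(value));
      }
    }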
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java
index 1f4a9f42a9..7e49bd19ae 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.service.ServiceOperations;
 import org.apache.hadoop.ipc.CallerContext;
 import org.apache.hadoop.util.ApplicationClassLoader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -320,7 +321,7 @@ protected void serviceStart() throws Exception {
       }
     }
 
-    objMapper = new ObjectMapper();
+    objMapper = JacksonUtil.createBasicObjectMapper();
     objMapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     jsonFactory = new MappingJsonFactory(objMapper);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
index f84eeebbf0..8ee6d1864c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.yarn.server.timeline;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
@@ -298,7 +298,6 @@ public void close() throws IOException {
       }
     };
   }
-  static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
   @SuppressWarnings("unchecked")
   private V getEntityForKey(byte[] key) throws IOException {
@@ -306,7 +305,7 @@ private V getEntityForKey(byte[] key) throws IOException {
     if (resultRaw == null) {
       return null;
     }
-    return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class);
+    return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class);
   }
 
   private byte[] getStartTimeKey(K entityId) {
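These two timeline stores illustrate both halves of the pattern: EntityGroupFSTimelineStore keeps a private mapper because an AnnotationIntrospector is mutable, mapper-level state, while LevelDBCacheTimelineStore needs no customization and can drop its static mapper in favor of the shared reader. A sketch mirroring the former, with details assumed (class name invented for illustration):

    import com.fasterxml.jackson.databind.MappingJsonFactory;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.type.TypeFactory;
    import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
    import org.apache.hadoop.util.JacksonUtil;

    class JaxbAwareFactoryDemo {
      static MappingJsonFactory newJaxbAwareFactory() {
        // setAnnotationIntrospector() mutates the mapper, so this must be a
        // private instance; the shared reader/writer stay default-configured.
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        mapper.setAnnotationIntrospector(
            new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
        return new MappingJsonFactory(mapper);
      }
    }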
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java
index cb887fe264..d3885c5bc8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.server.timeline;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -31,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@@ -108,7 +108,7 @@ static FSDataOutputStream createLogFile(Path logPath, FileSystem fs)
   }
 
   static ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -230,10 +230,9 @@ static TimelineEvent createEvent(long timestamp, String type, Map<String, Object>
   static <T> T getTimelineRecordFromJSON(
       String jsonString, Class<T> clazz)
       throws JsonGenerationException, JsonMappingException, IOException {
-    return mapper.readValue(jsonString, clazz);
+    return OBJECT_MAPPER.readValue(jsonString, clazz);
   }
 
 private static void fillFields(TimelineEntity finalEntity,