Revert "HADOOP-19231. Add JacksonUtil to manage Jackson classes (#6953)"

This reverts commit fa9bb0d1ac.
Ayush Saxena 2024-08-29 00:29:19 +05:30
parent 0aab1a2976
commit 0837c84a9f
GPG Key ID: D09AE71061AB564D
71 changed files with 296 additions and 392 deletions

Configuration.java

@@ -22,6 +22,7 @@
 import com.ctc.wstx.io.StreamBootstrapper;
 import com.ctc.wstx.io.SystemId;
 import com.ctc.wstx.stax.WstxInputFactory;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import java.io.BufferedInputStream;
@@ -100,7 +101,6 @@
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.util.ConfigurationHelper;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringInterner;
@@ -3792,7 +3792,8 @@ public static void dumpConfiguration(Configuration config,
       throw new IllegalArgumentException("Property " +
           propertyName + " not found");
     } else {
-      JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+      JsonFactory dumpFactory = new JsonFactory();
+      JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
       dumpGenerator.writeStartObject();
       dumpGenerator.writeFieldName("property");
       appendJSONProperty(dumpGenerator, config, propertyName,
@@ -3830,7 +3831,8 @@ public static void dumpConfiguration(Configuration config,
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
    dumpGenerator.writeStartObject();
    dumpGenerator.writeFieldName("properties");
    dumpGenerator.writeStartArray();
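
Both hunks above restore the same streaming pattern: construct a JsonFactory, open a JsonGenerator over the output, and emit JSON tokens one at a time rather than building an object model in memory. A minimal, self-contained sketch of that pattern (illustrative only, not code from this commit):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;
    import java.io.StringWriter;

    public class GeneratorSketch {
      public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        JsonGenerator gen = new JsonFactory().createGenerator(out);
        gen.writeStartObject();
        gen.writeFieldName("property");
        gen.writeString("example"); // stand-in for appendJSONProperty(...)
        gen.writeEndObject();
        gen.flush();
        System.out.println(out); // {"property":"example"}
      }
    }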

KMSClientProvider.java

@@ -42,7 +42,6 @@
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
@@ -80,6 +79,7 @@
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -595,10 +595,11 @@ private <T> T call(HttpURLConnection conn, Object jsonOutput,
         && conn.getContentType().trim().toLowerCase()
             .startsWith(APPLICATION_JSON_MIME)
         && klass != null) {
+      ObjectMapper mapper = new ObjectMapper();
       InputStream is = null;
       try {
         is = conn.getInputStream();
-        ret = JacksonUtil.getSharedReader().readValue(is, klass);
+        ret = mapper.readValue(is, klass);
       } finally {
         IOUtils.closeStream(is);
       }

DecayRpcScheduler.java

@@ -38,10 +38,10 @@
 import javax.management.ObjectName;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call cost and

Server.java

@@ -121,7 +121,6 @@
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
@@ -131,6 +130,7 @@
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
 import org.apache.hadoop.tracing.TraceUtils;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -3843,8 +3843,9 @@ public int getNumOpenConnections() {
    * @return Get the NumOpenConnections/User.
    */
   public String getNumOpenConnectionsPerUser() {
+    ObjectMapper mapper = new ObjectMapper();
     try {
-      return JacksonUtil.getSharedWriter()
+      return mapper
           .writeValueAsString(connectionManager.getUserToConnectionsMap());
     } catch (IOException ignored) {
     }

JMXJsonServlet.java

@@ -43,13 +43,13 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.http.HttpServer2;
-import org.apache.hadoop.util.JacksonUtil;
 /*
  * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@@ -134,6 +134,11 @@ public class JMXJsonServlet extends HttpServlet {
    */
   protected transient MBeanServer mBeanServer;
+  /**
+   * Json Factory to create Json generators for write objects in json format
+   */
+  protected transient JsonFactory jsonFactory;
   /**
    * Initialize this servlet.
   */
@@ -141,6 +146,7 @@ public class JMXJsonServlet extends HttpServlet {
   public void init() throws ServletException {
     // Retrieve the MBean server
     mBeanServer = ManagementFactory.getPlatformMBeanServer();
+    jsonFactory = new JsonFactory();
   }
   protected boolean isInstrumentationAccessAllowed(HttpServletRequest request,
@@ -181,7 +187,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
       response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
       response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
-      jg = JacksonUtil.getSharedWriter().createGenerator(writer);
+      jg = jsonFactory.createGenerator(writer);
       jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
       jg.useDefaultPrettyPrinter();
       jg.writeStartObject();

MetricsJsonBuilder.java

@@ -21,8 +21,8 @@
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.JacksonUtil;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,7 +46,8 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
   /**
    * Build an instance.

DelegationTokenAuthenticationHandler.java

@@ -46,7 +46,6 @@
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -166,7 +165,7 @@ public void initTokenManager(Properties config) {
   @VisibleForTesting
   public void initJsonFactory(Properties config) {
     boolean hasFeature = false;
-    JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory();
+    JsonFactory tmpJsonFactory = new JsonFactory();
     for (Map.Entry entry : config.entrySet()) {
       String key = (String)entry.getKey();
@@ -336,7 +335,7 @@ public boolean managementOperation(AuthenticationToken token,
       if (map != null) {
         response.setContentType(MediaType.APPLICATION_JSON);
         Writer writer = response.getWriter();
-        ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory);
+        ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
         jsonMapper.writeValue(writer, map);
         writer.write(ENTER);
         writer.flush();

JacksonUtil.java (deleted)

@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.util;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.fasterxml.jackson.databind.json.JsonMapper;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-
-/**
- * Utility for sharing code related to Jackson usage in Hadoop.
- */
-@Private
-public final class JacksonUtil {
-
-  private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper();
-  private static final ObjectReader SHARED_BASIC_OBJECT_READER =
-      SHARED_BASIC_OBJECT_MAPPER.reader();
-  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER =
-      SHARED_BASIC_OBJECT_MAPPER.writer();
-  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY =
-      SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter();
-
-  /**
-   * Creates a new {@link JsonFactory} instance with basic configuration.
-   *
-   * @return an {@link JsonFactory} with basic configuration
-   */
-  public static JsonFactory createBasicJsonFactory() {
-    // deliberately return a new instance instead of sharing one because we can't trust
-    // that users won't modify this instance
-    return new JsonFactory();
-  }
-
-  /**
-   * Creates a new {@link ObjectMapper} instance with basic configuration.
-   *
-   * @return an {@link ObjectMapper} with basic configuration
-   */
-  public static ObjectMapper createBasicObjectMapper() {
-    // deliberately return a new instance instead of sharing one because we can't trust
-    // that users won't modify this instance
-    return JsonMapper.builder(createBasicJsonFactory()).build();
-  }
-
-  /**
-   * Creates a new {@link ObjectMapper} instance based on the configuration
-   * in the input {@link JsonFactory}.
-   *
-   * @param jsonFactory a pre-configured {@link JsonFactory}
-   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
-   */
-  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
-    return JsonMapper.builder(jsonFactory).build();
-  }
-
-  /**
-   * Returns a shared {@link ObjectReader} instance with basic configuration.
-   *
-   * @return a shared {@link ObjectReader} instance with basic configuration
-   */
-  public static ObjectReader getSharedReader() {
-    return SHARED_BASIC_OBJECT_READER;
-  }
-
-  /**
-   * Returns an {@link ObjectReader} for the given type instance with basic configuration.
-   *
-   * @param type the class that the reader has to support
-   * @return an {@link ObjectReader} instance with basic configuration
-   */
-  public static ObjectReader createBasicReaderFor(Class<?> type) {
-    return SHARED_BASIC_OBJECT_MAPPER.readerFor(type);
-  }
-
-  /**
-   * Returns a shared {@link ObjectWriter} instance with basic configuration.
-   *
-   * @return a shared {@link ObjectWriter} instance with basic configuration
-   */
-  public static ObjectWriter getSharedWriter() {
-    return SHARED_BASIC_OBJECT_WRITER;
-  }
-
-  /**
-   * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration.
-   *
-   * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration
-   */
-  public static ObjectWriter getSharedWriterWithPrettyPrint() {
-    return SHARED_BASIC_OBJECT_WRITER_PRETTY;
-  }
-
-  /**
-   * Returns an {@link ObjectWriter} for the given type instance with basic configuration.
-   *
-   * @param type the class that the writer has to support
-   * @return an {@link ObjectWriter} instance with basic configuration
-   */
-  public static ObjectWriter createBasicWriterFor(Class<?> type) {
-    return SHARED_BASIC_OBJECT_MAPPER.writerFor(type);
-  }
-
-  private JacksonUtil() {}
-}
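
The deleted utility centralized one idea: ObjectReader and ObjectWriter are immutable and safe to share across threads, so building them once avoids constructing a fresh ObjectMapper at every call site, which is the pattern the rest of this revert returns to. A hedged sketch of the trade-off (illustrative, not part of the commit):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;

    public class SharedWriterSketch {
      // Built once; ObjectWriter is immutable, so sharing it is thread-safe.
      private static final ObjectWriter WRITER = new ObjectMapper().writer();

      // Pattern restored by the revert: a new mapper at each call site.
      static String perCall(Object value) throws Exception {
        return new ObjectMapper().writeValueAsString(value);
      }

      // Pattern JacksonUtil.getSharedWriter() encouraged: reuse one writer.
      static String shared(Object value) throws Exception {
        return WRITER.writeValueAsString(value);
      }
    }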

JsonSerialization.java

@@ -76,8 +76,11 @@ public class JsonSerialization<T> {
   private final Class<T> classType;
   private final ObjectMapper mapper;
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
-  private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader MAP_READER =
+      new ObjectMapper().readerFor(Map.class);
   /**
    * @return an ObjectWriter which pretty-prints its output
@@ -103,7 +106,7 @@ public JsonSerialization(Class<T> classType,
       boolean failOnUnknownProperties, boolean pretty) {
     Preconditions.checkArgument(classType != null, "null classType");
     this.classType = classType;
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
         failOnUnknownProperties);
     mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty);
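
The restored constructor toggles two stock Jackson features per instance. A self-contained sketch of what each one does (illustrative only; Entry is a made-up type):

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializationFeature;

    public class FeatureSketch {
      public static class Entry {
        public int known; // the only declared property
      }

      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Lenient reads: the undeclared "unknown" field below is skipped
        // instead of raising UnrecognizedPropertyException.
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        // Pretty writes: output is indented instead of a single line.
        mapper.configure(SerializationFeature.INDENT_OUTPUT, true);

        Entry e = mapper.readValue("{\"known\":1,\"unknown\":2}", Entry.class);
        System.out.println(mapper.writeValueAsString(e));
      }
    }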

KMSJSONReader.java

@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.util.JacksonUtil;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.WebApplicationException;
@@ -37,6 +38,7 @@
 @Consumes(MediaType.APPLICATION_JSON)
 @InterfaceAudience.Private
 public class KMSJSONReader implements MessageBodyReader<Object> {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   @Override
   public boolean isReadable(Class<?> type, Type genericType,
@@ -50,6 +52,6 @@ public Object readFrom(Class<Object> type, Type genericType,
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
       throws IOException, WebApplicationException {
-    return JacksonUtil.getSharedReader().readValue(entityStream, type);
+    return MAPPER.readValue(entityStream, type);
   }
 }

DiskBalancerWorkItem.java

@@ -20,8 +20,8 @@
 package org.apache.hadoop.hdfs.server.datanode;
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -35,8 +35,9 @@
 @InterfaceStability.Unstable
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class DiskBalancerWorkItem {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class);
+      new ObjectMapper().readerFor(DiskBalancerWorkItem.class);
   private long startTime;
   private long secondsElapsed;
@@ -172,7 +173,7 @@ public void incBlocksCopied() {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    return JacksonUtil.getSharedWriter().writeValueAsString(this);
+    return MAPPER.writeValueAsString(this);
   }
   /**

DiskBalancerWorkStatus.java

@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -40,13 +39,14 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskBalancerWorkStatus {
-  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
-      JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+      new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
   private static final ObjectReader READER_WORKSTATUS =
-      MAPPER.readerFor(DiskBalancerWorkStatus.class);
+      new ObjectMapper().readerFor(DiskBalancerWorkStatus.class);
-  private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor(
-      defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class));
+  private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
+      .readerFor(defaultInstance().constructCollectionType(List.class,
+          DiskBalancerWorkEntry.class));
   private final List<DiskBalancerWorkEntry> currentState;
   private Result result;

CombinedHostsFileReader.java

@@ -18,7 +18,9 @@
 package org.apache.hadoop.hdfs.util;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import java.io.File;
@@ -40,7 +42,6 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -82,6 +83,7 @@ private CombinedHostsFileReader() {
   public static DatanodeAdminProperties[]
       readFile(final String hostsFilePath) throws IOException {
     DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
+    ObjectMapper objectMapper = new ObjectMapper();
     File hostFile = new File(hostsFilePath);
     boolean tryOldFormat = false;
@@ -89,8 +91,7 @@
     try (Reader input =
         new InputStreamReader(
            Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
-      allDNs = JacksonUtil.getSharedReader()
-          .readValue(input, DatanodeAdminProperties[].class);
+      allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
     } catch (JsonMappingException jme) {
       // The old format doesn't have json top-level token to enclose
      // the array.
@@ -102,12 +103,15 @@
     }
     if (tryOldFormat) {
-      ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class);
+      ObjectReader objectReader =
+          objectMapper.readerFor(DatanodeAdminProperties.class);
+      JsonFactory jsonFactory = new JsonFactory();
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
           new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
             StandardCharsets.UTF_8)) {
-        Iterator<DatanodeAdminProperties> iterator = objectReader.readValues(input);
+        Iterator<DatanodeAdminProperties> iterator =
+            objectReader.readValues(jsonFactory.createParser(input));
         while (iterator.hasNext()) {
           DatanodeAdminProperties properties = iterator.next();
           all.add(properties);
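
The old-format fallback above leans on Jackson's ability to iterate a stream of concatenated JSON documents that have no enclosing array. A minimal sketch of that mechanism (illustrative; the field name "port" is made up):

    import com.fasterxml.jackson.databind.MappingIterator;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Map;

    public class ReadValuesSketch {
      public static void main(String[] args) throws Exception {
        // Three JSON objects back to back, with no [ ... ] around them.
        String oldFormat = "{\"port\":1} {\"port\":2} {\"port\":3}";
        MappingIterator<Map<String, Object>> it = new ObjectMapper()
            .readerFor(Map.class)
            .readValues(oldFormat);
        while (it.hasNext()) {
          System.out.println(it.next()); // {port=1}, then {port=2}, {port=3}
        }
      }
    }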

CombinedHostsFileWriter.java

@@ -26,11 +26,11 @@
 import java.nio.file.Paths;
 import java.util.Set;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
-import org.apache.hadoop.util.JacksonUtil;
 /**
  * Writer support for JSON-based datanode configuration, an alternative format
@@ -59,10 +59,12 @@ private CombinedHostsFileWriter() {
    */
   public static void writeFile(final String hostsFile,
       final Set<DatanodeAdminProperties> allDNs) throws IOException {
+    final ObjectMapper objectMapper = new ObjectMapper();
     try (Writer output =
         new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
            StandardCharsets.UTF_8)) {
-      JacksonUtil.getSharedWriter().writeValue(output, allDNs);
+      objectMapper.writeValue(output, allDNs);
     }
   }
 }

JsonUtilClient.java

@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
@@ -654,7 +654,7 @@ static List<String> toXAttrNames(final Map<?, ?> json)
     }
     final String namesInJson = (String) json.get("XAttrNames");
-    ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class);
+    ObjectReader reader = new ObjectMapper().readerFor(List.class);
     final List<Object> xattrs = reader.readValue(namesInJson);
     final List<String> names =
         Lists.newArrayListWithCapacity(json.keySet().size());

HttpFSFileSystem.java

@@ -71,7 +71,6 @@
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1819,7 +1818,7 @@ public Collection<FileStatus> getTrashRoots(boolean allUsers) {
   @VisibleForTesting
   static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
         String.class, BlockLocation[].class);
     MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,

SlowDiskTracker.java

@@ -21,6 +21,7 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
@@ -31,7 +32,6 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
@@ -71,7 +71,7 @@ public class SlowDiskTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
   /**
    * Number of disks to include in JSON report per operation. We will return

SlowPeerTracker.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
@@ -29,7 +30,6 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics;
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,8 +75,7 @@ public class SlowPeerTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
   */
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
   /**
    * Number of nodes to include in JSON report. We will return nodes with
    * the highest number of votes from peers.

FsVolumeImpl.java

@@ -79,18 +79,18 @@
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
-import org.apache.hadoop.util.Preconditions;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -103,9 +103,10 @@
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(BlockIteratorState.class);
+      new ObjectMapper().readerFor(BlockIteratorState.class);
   private final FsDatasetImpl dataset;
   private final String storageID;

ProvidedVolumeImpl.java

@@ -32,6 +32,7 @@
 import java.util.concurrent.atomic.AtomicLong;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -59,7 +60,6 @@
 import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
@@ -369,7 +369,7 @@ public void releaseReservedSpace(long bytesToRelease) {
   }
   private static final ObjectWriter WRITER =
-      JacksonUtil.getSharedWriterWithPrettyPrint();
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
   private static class ProvidedBlockIteratorState {
     ProvidedBlockIteratorState() {

Command.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.command;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.commons.cli.CommandLine;
@@ -46,7 +47,6 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HostsFileReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,7 +77,8 @@
  * Common interface for command handling.
  */
 public abstract class Command extends Configured implements Closeable {
-  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(HashMap.class);
   static final Logger LOG = LoggerFactory.getLogger(Command.class);
   private Map<String, String> validArgs = new HashMap<>();
   private URI clusterURI;

JsonNodeConnector.java

@@ -17,14 +17,15 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
-import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
+import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
+    .DiskBalancerDataNode;
 import java.io.File;
 import java.net.URL;
@@ -37,7 +38,7 @@ public class JsonNodeConnector implements ClusterConnector {
   private static final Logger LOG =
       LoggerFactory.getLogger(JsonNodeConnector.class);
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
+      new ObjectMapper().readerFor(DiskBalancerCluster.class);
   private final URL clusterURI;
   /**

DiskBalancerCluster.java

@@ -19,7 +19,9 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -29,8 +31,6 @@
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
-import org.apache.hadoop.util.JacksonUtil;
-import org.apache.hadoop.util.Preconditions;
 import java.io.File;
 import java.io.IOException;
@@ -73,7 +73,7 @@ public class DiskBalancerCluster {
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerCluster.class);
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
+      new ObjectMapper().readerFor(DiskBalancerCluster.class);
   private final Set<String> exclusionList;
   private final Set<String> inclusionList;
   private ClusterConnector clusterConnector;

DiskBalancerVolume.java

@@ -19,10 +19,10 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.hdfs.web.JsonUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -34,7 +34,7 @@
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class DiskBalancerVolume {
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class);
+      new ObjectMapper().readerFor(DiskBalancerVolume.class);
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerVolume.class);

NodePlan.java

@@ -18,9 +18,9 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.planner;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import java.io.IOException;
@@ -39,8 +39,10 @@ public class NodePlan {
   private int port;
   private long timeStamp;
-  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class);
-  private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class);
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
+  private static final ObjectWriter WRITER = MAPPER.writerFor(
+      MAPPER.constructType(NodePlan.class));
   /**
    * returns timestamp when this plan was created.
    *

NetworkTopologyServlet.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -25,7 +26,6 @@
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import javax.servlet.ServletContext;
@@ -123,7 +123,8 @@ protected void printTopology(PrintStream stream, List<Node> leaves,
   protected void printJsonFormat(PrintStream stream, Map<String,
       TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
     dumpGenerator.writeStartArray();
     for(String r : racks) {

StartupProgressServlet.java

@@ -21,6 +21,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
@@ -28,7 +29,6 @@
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -61,7 +61,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
         getServletContext());
     StartupProgressView view = prog.createView();
-    JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter());
+    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
     try {
       json.writeStartObject();
       json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());

JsonUtil.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.web;
-import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -39,12 +38,13 @@
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import java.io.IOException;
 import java.util.*;
@@ -52,11 +52,11 @@
 public class JsonUtil {
   private static final Object[] EMPTY_OBJECT_ARRAY = {};
-  // Reuse ObjectWriter instance for improving performance.
-  // ObjectWriter is thread safe as long as we always configure instance
+  // Reuse ObjectMapper instance for improving performance.
+  // ObjectMapper is thread safe as long as we always configure instance
   // before use. We don't have a re-entrant call pattern in WebHDFS,
   // so we just need to worry about thread-safety.
-  private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   /** Convert a token object to a Json string. */
   public static String toJsonString(final Token<? extends TokenIdentifier> token
@@ -93,7 +93,7 @@ public static String toJsonString(final String key, final Object value) {
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put(key, value);
     try {
-      return SHARED_WRITER.writeValueAsString(m);
+      return MAPPER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -113,7 +113,7 @@ public static String toJsonString(final HdfsFileStatus status,
     final Map<String, Object> m = toJsonMap(status);
     try {
       return includeType ?
-          toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m);
+          toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -453,7 +453,7 @@ public static String toJsonString(final AclStatus status) {
     finalMap.put(AclStatus.class.getSimpleName(), m);
     try {
-      return SHARED_WRITER.writeValueAsString(finalMap);
+      return MAPPER.writeValueAsString(finalMap);
     } catch (IOException ignored) {
     }
     return null;
@@ -491,7 +491,7 @@ public static String toJsonString(final List<XAttr> xAttrs,
       final XAttrCodec encoding) throws IOException {
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
-    return SHARED_WRITER.writeValueAsString(finalMap);
+    return MAPPER.writeValueAsString(finalMap);
   }
   public static String toJsonString(final List<XAttr> xAttrs)
@@ -500,14 +500,14 @@ public static String toJsonString(final List<XAttr> xAttrs)
     for (XAttr xAttr : xAttrs) {
       names.add(XAttrHelper.getPrefixedName(xAttr));
     }
-    String ret = SHARED_WRITER.writeValueAsString(names);
+    String ret = MAPPER.writeValueAsString(names);
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrNames", ret);
-    return SHARED_WRITER.writeValueAsString(finalMap);
+    return MAPPER.writeValueAsString(finalMap);
   }
   public static String toJsonString(Object obj) throws IOException {
-    return SHARED_WRITER.writeValueAsString(obj);
+    return MAPPER.writeValueAsString(obj);
   }
   public static String toJsonString(BlockStoragePolicy[] storagePolicies) {
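
The "configure instance before use" caveat in the restored comment can be made concrete: do all mapper configuration during static initialization, before any thread touches it, and afterwards only call read/write methods, which are thread-safe. A hedged sketch (the disabled feature here is an arbitrary example, not taken from this commit):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializationFeature;

    public class ConfigureBeforeUse {
      // Configured once during class init; never reconfigured afterwards.
      private static final ObjectMapper MAPPER = new ObjectMapper()
          .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);

      // Safe for concurrent callers: only read/write methods are invoked.
      static String toJson(Object value) throws Exception {
        return MAPPER.writeValueAsString(value);
      }
    }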

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapred;

+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +28,6 @@
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -531,7 +531,8 @@ static void dumpConfiguration(Writer out, String configFile,
       return;
     }

-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
     QueueConfigurationParser parser;
     boolean aclsEnabled = false;
     if (conf != null) {
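The dump logic itself is unchanged: a JsonGenerator streams the configuration out field by field; only its origin moves from the shared writer back to a throwaway JsonFactory. A small sketch of that streaming pattern, with invented field names:

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.io.StringWriter;

public class QueueDumpSketch {
  public static void main(String[] args) throws IOException {
    StringWriter out = new StringWriter();
    JsonGenerator gen = new JsonFactory().createGenerator(out);
    gen.writeStartObject();                 // {
    gen.writeFieldName("queues");           //   "queues":
    gen.writeStartArray();                  //   [
    gen.writeString("default");             //     "default"
    gen.writeEndArray();                    //   ]
    gen.writeEndObject();                   // }
    gen.flush();
    System.out.println(out);                // {"queues":["default"]}
  }
}
```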
View File
@@ -28,7 +28,6 @@
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

 /**
@@ -42,7 +41,7 @@ private JobHistoryEventUtils() {
   public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;

   public static JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     ArrayNode nodes = mapper.createArrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {
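countersToJSON only needs the mapper as a node factory for Jackson's tree model, which is why an unconfigured new ObjectMapper() is a drop-in replacement here. A sketch of the same tree-building idiom, with a stand-in for the Counters structure:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class CountersJsonSketch {
  public static JsonNode groupsToJson(String... groupNames) {
    ObjectMapper mapper = new ObjectMapper();
    ArrayNode nodes = mapper.createArrayNode();
    for (String name : groupNames) {
      ObjectNode group = nodes.addObject(); // appends {} and returns it
      group.put("NAME", name);
    }
    return nodes;
  }

  public static void main(String[] args) {
    // [{"NAME":"FileSystemCounters"}]
    System.out.println(groupsToJson("FileSystemCounters"));
  }
}
```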
View File
@@ -22,6 +22,7 @@
 import java.nio.charset.StandardCharsets;
 import java.util.Map;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -29,7 +30,6 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.S3AUtils;
-import org.apache.hadoop.util.JacksonUtil;

 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;

@@ -91,8 +91,8 @@ public static String getS3EncryptionContextBase64Encoded(
       if (encryptionContextMap.isEmpty()) {
         return "";
       }
-      final String encryptionContextJson = JacksonUtil.getSharedWriter()
-          .writeValueAsString(encryptionContextMap);
+      final String encryptionContextJson = new ObjectMapper().writeValueAsString(
+          encryptionContextMap);
       return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8));
     } catch (IOException e) {
       if (propagateExceptions) {
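The encode path is: serialize the context map to JSON, then Base64-encode the UTF-8 bytes. A sketch under the assumption that commons-codec is on the classpath (it is for hadoop-aws); the map contents are invented:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.codec.binary.Base64;

import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeMap;

public class EncryptionContextSketch {
  public static String encodeBase64(Map<String, String> context) throws Exception {
    if (context.isEmpty()) {
      return "";
    }
    String json = new ObjectMapper().writeValueAsString(context);
    return Base64.encodeBase64String(json.getBytes(StandardCharsets.UTF_8));
  }

  public static void main(String[] args) throws Exception {
    Map<String, String> ctx = new TreeMap<>();
    ctx.put("project", "analytics");
    // Prints the Base64 form of {"project":"analytics"}
    System.out.println(encodeBase64(ctx));
  }
}
```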
View File
@@ -84,7 +84,6 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.LambdaUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
@@ -97,6 +96,7 @@
 import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;

 import com.microsoft.azure.storage.StorageException;
@@ -127,7 +127,7 @@ public static class FolderRenamePending {
     private static final int FORMATTING_BUFFER = 10000;
     private boolean committed;
     public static final String SUFFIX = "-RenamePending.json";
-    private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper()
+    private static final ObjectReader READER = new ObjectMapper()
         .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
         .readerFor(JsonNode.class);
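The restored field shows the usual "configure once, read many" idiom: leniency is set on the mapper, and the derived ObjectReader is immutable and thread-safe, so one static instance serves every -RenamePending.json read. A runnable sketch with an inline document:

```java
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

import java.io.IOException;

public class RenamePendingReaderSketch {
  private static final ObjectReader READER = new ObjectMapper()
      .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
      .readerFor(JsonNode.class);

  public static void main(String[] args) throws IOException {
    // The unquoted field name would be rejected by a default reader.
    JsonNode node = READER.readValue("{OldFolderName: \"a/b\"}");
    System.out.println(node.get("OldFolderName").asText()); // a/b
  }
}
```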
View File
@@ -24,11 +24,11 @@
 import java.util.List;
 import java.util.concurrent.TimeUnit;

+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.http.NameValuePair;
@@ -40,7 +40,7 @@
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectMapper;

 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
@@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl {
   public static final Logger LOG =
       LoggerFactory.getLogger(AzureNativeFileSystemStore.class);

-  private static final ObjectReader RESPONSE_READER = JacksonUtil
-      .createBasicReaderFor(RemoteSASKeyGenerationResponse.class);
+  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
+      .readerFor(RemoteSASKeyGenerationResponse.class);

   /**
    * Configuration parameter name expected in the Configuration
View File
@@ -20,6 +20,7 @@
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
@@ -28,14 +29,13 @@
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.concurrent.TimeUnit;
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;

 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
@@ -49,8 +49,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
   public static final Logger LOG = LoggerFactory
       .getLogger(RemoteWasbAuthorizerImpl.class);

-  private static final ObjectReader RESPONSE_READER = JacksonUtil
-      .createBasicReaderFor(RemoteWasbAuthorizerResponse.class);
+  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
+      .readerFor(RemoteWasbAuthorizerResponse.class);

   /**
    * Configuration parameter name expected in the Configuration object to
@@ -176,7 +176,7 @@ private boolean authorizeInternal(String wasbAbsolutePath, String accessType, St
     uriBuilder
         .addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath);
     uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType);
-    if (StringUtils.isNotEmpty(resourceOwner)) {
+    if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) {
       uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME,
           resourceOwner);
     }
View File
@@ -29,6 +29,9 @@
 import java.util.Hashtable;
 import java.util.Map;

+import org.apache.hadoop.util.Preconditions;
+
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
 import org.slf4j.Logger;
@@ -39,8 +42,6 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils;
 import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy;
-import org.apache.hadoop.util.JacksonUtil;
-import org.apache.hadoop.util.Preconditions;

 /**
  * This class provides convenience methods to obtain AAD tokens.
@@ -492,7 +493,8 @@ private static AzureADToken parseTokenFromStream(
     int expiryPeriodInSecs = 0;
     long expiresOnInSecs = -1;

-    JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream);
+    JsonFactory jf = new JsonFactory();
+    JsonParser jp = jf.createParser(httpResponseStream);
     String fieldName, fieldValue;
     jp.nextToken();
     while (jp.hasCurrentToken()) {
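parseTokenFromStream walks the response with the streaming API rather than binding a POJO: advance token by token, remember the current field name, and read values as text. A compact sketch with a stand-in for the real AAD response body:

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class TokenStreamSketch {
  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream(
        "{\"access_token\":\"abc\",\"expires_in\":\"3600\"}"
            .getBytes(StandardCharsets.UTF_8));
    JsonParser jp = new JsonFactory().createParser(in);
    String accessToken = null;
    String expiresIn = null;
    jp.nextToken(); // START_OBJECT
    while (jp.nextToken() != JsonToken.END_OBJECT) {
      String fieldName = jp.getCurrentName();
      jp.nextToken(); // move from field name to its value
      if ("access_token".equals(fieldName)) {
        accessToken = jp.getText();
      } else if ("expires_in".equals(fieldName)) {
        expiresIn = jp.getText();
      }
    }
    jp.close();
    System.out.println(accessToken + " expires in " + expiresIn + "s");
  }
}
```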
View File
@@ -30,6 +30,7 @@
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -39,7 +40,6 @@
 import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable;
 import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
 import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * Base Http operation class for orchestrating server IO calls. Child classes would
@@ -447,7 +447,7 @@ private void processStorageErrorResponse() {
       if (stream == null) {
         return;
       }
-      JsonFactory jf = JacksonUtil.createBasicJsonFactory();
+      JsonFactory jf = new JsonFactory();
       try (JsonParser jp = jf.createParser(stream)) {
         String fieldName, fieldValue;
         jp.nextToken();  // START_OBJECT - {
@@ -509,7 +509,8 @@ private void parseListFilesResponse(final InputStream stream)
     }

     try {
-      this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream,
+      final ObjectMapper objectMapper = new ObjectMapper();
+      this.listResultSchema = objectMapper.readValue(stream,
           ListResultSchema.class);
     } catch (IOException ex) {
       log.error("Unable to deserialize list results", ex);
View File
@@ -51,7 +51,6 @@
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -485,7 +484,7 @@ static Set<String> parseStaleDataNodeList(String liveNodeJsonString,
       final int blockThreshold, final Logger log) throws IOException {
     final Set<String> dataNodesToReport = new HashSet<>();

-    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
+    JsonFactory fac = new JsonFactory();
     JsonParser parser = fac.createParser(IOUtils
         .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));

@@ -555,7 +554,7 @@ static String fetchNameNodeJMXValue(Properties nameNodeProperties,
           "Unable to retrieve JMX: " + conn.getResponseMessage());
     }
     InputStream in = conn.getInputStream();
-    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
+    JsonFactory fac = new JsonFactory();
     JsonParser parser = fac.createParser(in);
     if (parser.nextToken() != JsonToken.START_OBJECT
         || parser.nextToken() != JsonToken.FIELD_NAME
View File
@@ -22,6 +22,7 @@
 import java.io.OutputStream;

 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.Version;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -35,7 +36,6 @@
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.mapreduce.ID;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.tools.rumen.datatypes.*;
@@ -55,7 +55,8 @@ public class Anonymizer extends Configured implements Tool {
   private StatePool statePool;

   private ObjectMapper outMapper = null;
+  private JsonFactory outFactory = null;

   private void initialize(String[] args) throws Exception {
     try {
       for (int i = 0; i < args.length; ++i) {
@@ -84,7 +85,7 @@ private void initialize(String[] args) throws Exception {
     // initialize the state manager after the anonymizers are registered
     statePool.initialize(getConf());

-    outMapper = JacksonUtil.createBasicObjectMapper();
+    outMapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule(
         "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
@@ -103,6 +104,8 @@ private void initialize(String[] args) throws Exception {
     // register the module with the object-mapper
     outMapper.registerModule(module);
+
+    outFactory = outMapper.getFactory();
   }

   // anonymize the job trace file
@@ -188,7 +191,7 @@ private JsonGenerator createJsonGenerator(Configuration conf, Path path)
     }

     JsonGenerator outGen =
-        outMapper.createGenerator(output, JsonEncoding.UTF8);
+        outFactory.createGenerator(output, JsonEncoding.UTF8);
     outGen.useDefaultPrettyPrinter();

     return outGen;
View File
@@ -26,7 +26,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * A simple wrapper for parsing JSON-encoded data using ObjectMapper.
@@ -49,10 +48,10 @@ class JsonObjectMapperParser<T> implements Closeable {
   */
  public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
      Configuration conf) throws IOException {
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
    this.clazz = clazz;
    InputStream input = new PossiblyDecompressedInputStream(path, conf);
-    jsonParser = mapper.createParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
  }

  /**
@@ -63,9 +62,9 @@ public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
   */
  public JsonObjectMapperParser(InputStream input, Class<? extends T> clazz)
      throws IOException {
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
    this.clazz = clazz;
-    jsonParser = mapper.createParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
  }

  /**
View File
@@ -30,7 +30,6 @@
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
 import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer;
 import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * Simple wrapper around {@link JsonGenerator} to write objects in JSON format.
@@ -40,7 +39,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
   private JsonGenerator writer;

   public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();

     // define a module
     SimpleModule module = new SimpleModule(
@@ -54,7 +53,7 @@ public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws I
     // register the module with the object-mapper
     mapper.registerModule(module);

-    writer = mapper.createGenerator(output, JsonEncoding.UTF8);
+    writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8);
     if (prettyPrint) {
       writer.useDefaultPrettyPrinter();
     }
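The getFactory() detail matters here: a generator created from the mapper's own factory carries the mapper as its ObjectCodec, so writeObject(...) can serialize arbitrary beans, while a generator from a bare new JsonFactory() would fail for lack of a codec. A sketch of the working form:

```java
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;

public class CodecAwareGeneratorSketch {
  public static void main(String[] args) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen =
        mapper.getFactory().createGenerator(out, JsonEncoding.UTF8);
    gen.useDefaultPrettyPrinter();
    // Works because the generator inherited the mapper as its codec.
    gen.writeObject(Collections.singletonMap("state", "ok"));
    gen.close();
    System.out.println(out); // pretty-printed {"state" : "ok"}
  }
}
```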
View File
@@ -30,6 +30,7 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.Version;
@@ -43,7 +44,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.Anonymizer;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * A pool of states. States used by {@link DataType}'s can be managed the
@@ -206,7 +206,7 @@ private boolean reloadState(Path stateFile, Configuration configuration)
   }

   private void read(DataInput in) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -216,7 +216,7 @@ private void read(DataInput in) throws IOException {
     // register the module with the object-mapper
     mapper.registerModule(module);

-    JsonParser parser = mapper.createParser((InputStream)in);
+    JsonParser parser = mapper.getFactory().createParser((InputStream)in);
     StatePool statePool = mapper.readValue(parser, StatePool.class);
     this.setStates(statePool.getStates());
     parser.close();
@@ -273,7 +273,7 @@ public void persist() throws IOException {
   private void write(DataOutput out) throws IOException {
     // This is just a JSON experiment
     System.out.println("Dumping the StatePool's in JSON format.");
-    ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper outMapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -283,8 +283,9 @@ private void write(DataOutput out) throws IOException {
     // register the module with the object-mapper
     outMapper.registerModule(module);

+    JsonFactory outFactory = outMapper.getFactory();
     JsonGenerator jGen =
-        outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8);
+        outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8);
     jGen.useDefaultPrettyPrinter();

     jGen.writeObject(this);
View File
@@ -23,6 +23,7 @@
 import java.util.List;

 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -140,8 +141,9 @@ public static void main(String[] args) throws IOException {
       Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);

       ObjectMapper mapper = new ObjectMapper();
+      JsonFactory factory = mapper.getFactory();
       FSDataOutputStream ostream = lfs.create(goldFilePath, true);
-      JsonGenerator gen = mapper.createGenerator((OutputStream)ostream,
+      JsonGenerator gen = factory.createGenerator((OutputStream)ostream,
           JsonEncoding.UTF8);
       gen.useDefaultPrettyPrinter();
View File
@@ -16,13 +16,13 @@
 package org.apache.hadoop.yarn.sls;

+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ReservationId;
@@ -44,8 +44,11 @@
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -119,14 +122,15 @@ public void startAM() throws YarnException, IOException {
    * Parse workload from a SLS trace file.
    */
   private void startAMFromSLSTrace(String inputTrace) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();

     try (Reader input = new InputStreamReader(
         new FileInputStream(inputTrace), StandardCharsets.UTF_8)) {
       JavaType type = mapper.getTypeFactory().
           constructMapType(Map.class, String.class, String.class);
       Iterator<Map<String, String>> jobIter = mapper.readValues(
-          mapper.createParser(input), type);
+          jsonF.createParser(input), type);

       while (jobIter.hasNext()) {
         try {
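An SLS trace is a long stream of concatenated JSON objects, and readValues() gives back a lazy iterator over them, so the whole file never has to be materialized. A self-contained sketch with a two-job inline "trace":

```java
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;

public class TraceIteratorSketch {
  public static void main(String[] args) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    Reader input = new StringReader(
        "{\"job.id\":\"job_1\"} {\"job.id\":\"job_2\"}");
    JavaType type = mapper.getTypeFactory()
        .constructMapType(Map.class, String.class, String.class);
    Iterator<Map<String, String>> jobs =
        mapper.readValues(mapper.createParser(input), type);
    while (jobs.hasNext()) {
      System.out.println(jobs.next().get("job.id")); // job_1, then job_2
    }
  }
}
```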
View File
@@ -35,6 +35,7 @@
 import java.util.TreeMap;
 import java.util.TreeSet;

+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.commons.cli.CommandLine;
@@ -43,7 +44,6 @@
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;

 @Private
@@ -126,10 +126,10 @@ private static void generateSLSLoadFile(String inputFile, String outputFile)
         StandardCharsets.UTF_8)) {
       try (Writer output =
           new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+        ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
-            mapper.createParser(input), Map.class);
+            new JsonFactory().createParser(input), Map.class);
         while (i.hasNext()) {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
@@ -143,7 +143,7 @@ private static void generateSLSNodeFile(String outputFile)
       throws IOException {
     try (Writer output =
         new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
         Map rack = new LinkedHashMap();
View File
@@ -34,7 +34,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
@@ -89,8 +88,7 @@ public SynthTraceJobProducer(Configuration conf, Path path)
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-
-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);

     FileSystem ifs = path.getFileSystem(conf);
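The only change is who wraps the tuned JsonFactory in a mapper; the builder idiom itself survives the revert. A sketch of that idiom with a trivial stand-in for the Workload POJO:

```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonFactoryBuilder;
import com.fasterxml.jackson.databind.ObjectMapper;

import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;

public class FactoryBuilderSketch {
  public static class Job {
    public String name; // public field keeps the stand-in bean minimal
  }

  public static void main(String[] args) throws Exception {
    JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
    // Interning repeated field names saves memory on large traces.
    jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
    mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
    // "priority" is unknown to Job but is skipped rather than rejected.
    Job job = mapper.readValue("{\"name\":\"sort\",\"priority\":1}", Job.class);
    System.out.println(job.name); // sort
  }
}
```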
View File
@@ -34,6 +34,7 @@
 import java.util.Map;
 import java.util.Set;

+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -44,7 +45,6 @@
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
@@ -120,11 +120,12 @@ public static Set<NodeDetails> parseNodesFromRumenTrace(
   public static Set<NodeDetails> parseNodesFromSLSTrace(
       String jobTrace) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         addNodes(nodeSet, i.next());
       }
@@ -166,11 +167,12 @@ private static void addNodes(Set<NodeDetails> nodeSet,
   public static Set<NodeDetails> parseNodesFromNodeFile(
       String nodeFile, Resource nmDefaultResource) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         Map jsonE = i.next();
         String rack = "/" + jsonE.get("rack");
View File
@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.sls;

 import org.apache.commons.math3.random.JDKRandomGenerator;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
@@ -61,7 +60,7 @@ public void testWorkloadGenerateTime()
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);

-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);

     SynthTraceJobProducer.Workload wl =
         mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@@ -182,7 +181,7 @@ public void testSample() throws IOException {
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);

-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);

     JDKRandomGenerator rand = new JDKRandomGenerator();
View File
@@ -28,7 +28,6 @@
 import java.util.Properties;
 import java.util.Random;

-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry;
 import org.apache.hadoop.yarn.appcatalog.model.Application;
@@ -58,18 +57,6 @@ public class AppCatalogSolrClient {
   private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class);
   private static String urlString;

-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-  }
-
   public AppCatalogSolrClient() {
     // Locate Solr URL
     ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -159,6 +146,8 @@ public List<AppStoreEntry> search(String keyword) {
   public List<AppEntry> listAppEntries() {
     List<AppEntry> list = new ArrayList<AppEntry>();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -175,7 +164,7 @@ public List<AppEntry> listAppEntries() {
         entry.setId(d.get("id").toString());
         entry.setName(d.get("name_s").toString());
         entry.setApp(d.get("app_s").toString());
-        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class));
         list.add(entry);
       }
@@ -187,6 +176,8 @@ public List<AppEntry> listAppEntries() {
   public AppStoreEntry findAppStoreEntry(String id) {
     AppStoreEntry entry = new AppStoreEntry();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -206,7 +197,7 @@ public AppStoreEntry findAppStoreEntry(String id) {
         entry.setDesc(d.get("desc_s").toString());
         entry.setLike(Integer.parseInt(d.get("like_i").toString()));
         entry.setDownload(Integer.parseInt(d.get("download_i").toString()));
-        Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class);
         String name;
         try {
@@ -231,6 +222,9 @@ public AppStoreEntry findAppStoreEntry(String id) {
   public AppEntry findAppEntry(String id) {
     AppEntry entry = new AppEntry();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
     query.setQuery("id:" + id);
@@ -246,7 +240,7 @@ public AppEntry findAppEntry(String id) {
         entry.setId(d.get("id").toString());
         entry.setApp(d.get("app_s").toString());
         entry.setName(d.get("name_s").toString());
-        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class));
       }
     } catch (SolrServerException | IOException e) {
@@ -258,6 +252,8 @@ public AppEntry findAppEntry(String id) {
   public void deployApp(String id, Service service) throws SolrServerException,
       IOException {
     long download = 0;
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     // Find application information from AppStore
@@ -291,7 +287,7 @@ public void deployApp(String id, Service service) throws SolrServerException,
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", entry.getOrg()+"/"+entry.getName());
-      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
+      request.addField("yarnfile_s", mapper.writeValueAsString(service));
       docs.add(request);
     }
@@ -330,6 +326,8 @@ public void deleteApp(String id) {
   public void register(Application app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -345,10 +343,10 @@ public void register(Application app) throws IOException {
       buffer.setField("download_i", 0);

       // Keep only YARN data model for yarnfile field
-      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
-      LOG.info("app:{}", yarnFile);
-      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
+      String yarnFile = mapper.writeValueAsString(app);
+      LOG.info("app:"+yarnFile);
+      Service yarnApp = mapper.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));

       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -361,6 +359,8 @@ public void register(Application app) throws IOException {
   protected void register(AppStoreEntry app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -376,10 +376,10 @@ protected void register(AppStoreEntry app) throws IOException {
       buffer.setField("download_i", app.getDownload());

       // Keep only YARN data model for yarnfile field
-      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
-      LOG.info("app:{}", yarnFile);
-      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
+      String yarnFile = mapper.writeValueAsString(app);
+      LOG.info("app:"+yarnFile);
+      Service yarnApp = mapper.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));

       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -391,6 +391,8 @@ protected void register(AppStoreEntry app) throws IOException {
   public void upgradeApp(Service service) throws IOException,
       SolrServerException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     if (service!=null) {
@@ -418,7 +420,7 @@ public void upgradeApp(Service service) throws IOException,
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", app);
-      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
+      request.addField("yarnfile_s", mapper.writeValueAsString(service));
       docs.add(request);
     }
     try {
View File
@@ -23,7 +23,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ServiceState;
@@ -47,19 +46,6 @@
 public class YarnServiceClient {

   private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class);

-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-  }
-
   private static Configuration conf = new Configuration();
   private static ClientConfig getClientConfig() {
     ClientConfig config = new DefaultClientConfig();
@@ -80,6 +66,8 @@ public YarnServiceClient() {
   }

   public void createApp(Service app) {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     ClientResponse response;
     try {
       boolean useKerberos = UserGroupInformation.isSecurityEnabled();
@@ -102,7 +90,7 @@ public void createApp(Service app) {
         app.setKerberosPrincipal(kerberos);
       }
       response = asc.getApiClient().post(ClientResponse.class,
-          OBJECT_MAPPER.writeValueAsString(app));
+          mapper.writeValueAsString(app));
       if (response.getStatus() >= 299) {
         String message = response.getEntity(String.class);
         throw new RuntimeException("Failed : HTTP error code : "
@@ -131,8 +119,10 @@ public void deleteApp(String appInstanceId) {
   }

   public void restartApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -149,8 +139,10 @@ public void restartApp(Service app) throws JsonProcessingException {
   }

   public void stopApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -167,12 +159,14 @@ public void stopApp(Service app) throws JsonProcessingException {
   }

   public void getStatus(AppEntry entry) {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = entry.getName();
     Service app = null;
     try {
       String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId))
           .get(String.class);
-      app = OBJECT_MAPPER.readValue(yarnFile, Service.class);
+      app = mapper.readValue(yarnFile, Service.class);
       entry.setYarnfile(app);
     } catch (UniformInterfaceException | IOException e) {
       LOG.error("Error in fetching application status: ", e);
@@ -180,9 +174,11 @@ public void getStatus(AppEntry entry) {
   }

   public void upgradeApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
     app.setState(ServiceState.EXPRESS_UPGRADING);
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
View File
@@ -26,7 +26,6 @@
 import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@@ -876,7 +875,7 @@ public void updateContainerStatus(ContainerStatus status) {
         doRegistryUpdate = false;
       }
     }
-    final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     try {
       Map<String, List<Map<String, String>>> ports = null;
       ports = mapper.readValue(status.getExposedPorts(),
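The exposed-ports payload deserializes into a nested generic type. One common way to express such a target (a sketch, not necessarily the exact call the surrounding code makes, and with an invented JSON literal) is a TypeReference:

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public class ExposedPortsSketch {
  public static void main(String[] args) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    String json =
        "{\"8080/tcp\":[{\"HostIp\":\"0.0.0.0\",\"HostPort\":\"31245\"}]}";
    Map<String, List<Map<String, String>>> ports = mapper.readValue(json,
        new TypeReference<Map<String, List<Map<String, String>>>>() { });
    System.out.println(ports.get("8080/tcp").get(0).get("HostPort")); // 31245
  }
}
```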
View File
@@ -30,7 +30,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -62,10 +61,9 @@ public class JsonSerDeser<T> {
   @SuppressWarnings("deprecation")
   public JsonSerDeser(Class<T> classType) {
     this.classType = classType;
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
-    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
   }

   public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) {
@@ -233,6 +231,7 @@ private void writeJsonAsBytes(T instance,
    * @throws JsonProcessingException parse problems
    */
   public String toJson(T instance) throws JsonProcessingException {
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
     return mapper.writeValueAsString(instance);
   }

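The restored toJson() flips INDENT_OUTPUT on the instance's mapper at call time, which also affects every later caller of that mapper. If pretty-printing should stay local to one call, an ObjectWriter view is the usual alternative; a sketch (the class name is illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;

public class ScopedPrettyPrint {
  private final ObjectMapper mapper = new ObjectMapper();

  // writerWithDefaultPrettyPrinter() returns an immutable ObjectWriter,
  // so the indentation setting never touches the mapper itself.
  public String toPrettyJson(Object instance) throws Exception {
    return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(instance);
  }
}
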
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.service.exceptions.BadConfigException;

 import java.io.IOException;
@@ -42,18 +41,6 @@
 @JsonInclude(value = JsonInclude.Include.NON_NULL)
 public class PublishedConfiguration {
-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
-  }
-
   public String description;
   public long updated;
@@ -167,7 +154,9 @@ public Properties asProperties() {
    * @throws IOException marshalling failure
    */
   public String asJson() throws IOException {
-    String json = OBJECT_MAPPER.writeValueAsString(entries);
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
+    String json = mapper.writeValueAsString(entries);
     return json;
   }

@@ -49,7 +49,6 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -61,6 +60,7 @@
 import org.apache.hadoop.yarn.exceptions.YarnException;

 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -274,7 +274,7 @@ public void flush() throws IOException {
   }

   private ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -365,8 +365,8 @@ public long getLastModifiedTime() {
   protected void prepareForWrite() throws IOException{
     this.stream = createLogFileStream(fs, logPath);
-    this.jsonGenerator = JacksonUtil.getSharedWriter()
-        .createGenerator((OutputStream)stream);
+    this.jsonGenerator = new JsonFactory().createGenerator(
+        (OutputStream)stream);
     this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     this.lastModifiedTime = Time.monotonicNow();
   }

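prepareForWrite() above wires a generator to the entity log stream with a newline root separator, i.e. one JSON document per line. A runnable sketch of that framing against an in-memory stream (the data values are made up):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.ByteArrayOutputStream;
import java.util.Collections;

public class JsonLinesSketch {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen = new JsonFactory().createGenerator(out);
    // The "\n" separator replaces the default single space between
    // root-level values, giving newline-delimited JSON.
    gen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));

    ObjectMapper mapper = new ObjectMapper();
    mapper.writeValue(gen, Collections.singletonMap("id", 1));
    mapper.writeValue(gen, Collections.singletonMap("id", 2));
    gen.close();

    // Prints {"id":1} and {"id":2} on separate lines.
    System.out.print(out.toString("UTF-8"));
  }
}
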
@@ -30,7 +30,6 @@
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -63,7 +62,7 @@ public class TimelineClientImpl extends TimelineClient {
   private static final Logger LOG =
       LoggerFactory.getLogger(TimelineClientImpl.class);

-  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();

   private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/";
   private static Options opts;

@@ -27,9 +27,9 @@
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;

+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -96,8 +96,9 @@ public static Credentials readCredentialsFromConfigFile(Path configFile,
     }

     // Parse the JSON and create the Tokens/Credentials.
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
-    JsonParser parser = mapper.createParser(contents);
+    ObjectMapper mapper = new ObjectMapper();
+    JsonFactory factory = mapper.getFactory();
+    JsonParser parser = factory.createParser(contents);
     JsonNode rootNode = mapper.readTree(parser);
     Credentials credentials = new Credentials();
@@ -160,7 +161,7 @@ public static boolean writeDockerCredentialsToPath(File outConfigFile,
       Credentials credentials) throws IOException {
     boolean foundDockerCred = false;
     if (credentials.numberOfTokens() > 0) {
-      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       ObjectNode rootNode = mapper.createObjectNode();
       ObjectNode registryUrlNode = mapper.createObjectNode();
       for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {

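readCredentialsFromConfigFile() parses the Docker client config as a tree rather than binding to a POJO, since the set of registries is open-ended. A small sketch of the same readTree() approach (the registry name and auth value are invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DockerConfigSketch {
  public static void main(String[] args) throws Exception {
    String contents =
        "{\"auths\":{\"registry.example.com\":{\"auth\":\"c2VjcmV0\"}}}";
    ObjectMapper mapper = new ObjectMapper();
    // readTree() keeps the document generic, so unknown registries and
    // extra fields need no model classes.
    JsonNode rootNode = mapper.readTree(contents);
    JsonNode auths = rootNode.path("auths");
    auths.fieldNames().forEachRemaining(registry ->
        System.out.println(registry + " -> "
            + auths.get(registry).path("auth").asText()));
  }
}
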
@@ -31,7 +31,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
@@ -54,10 +53,11 @@ public class TimelineUtils {
       "TIMELINE_FLOW_RUN_ID_TAG";
   public final static String DEFAULT_FLOW_VERSION = "1";

-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static ObjectMapper mapper;

   static {
-    YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
+    mapper = new ObjectMapper();
+    YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
   }

   /**
@@ -90,9 +90,9 @@ public static String dumpTimelineRecordtoJSON(Object o)
   public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
       throws JsonGenerationException, JsonMappingException, IOException {
     if (pretty) {
-      return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o);
+      return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
     } else {
-      return OBJECT_MAPPER.writeValueAsString(o);
+      return mapper.writeValueAsString(o);
     }
   }

@@ -28,8 +28,8 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.webapp.view.DefaultPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,6 +42,7 @@
 @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
 public abstract class Controller implements Params {
   public static final Logger LOG = LoggerFactory.getLogger(Controller.class);
+  static final ObjectMapper jsonMapper = new ObjectMapper();

   @RequestScoped
   public static class RequestContext{
@@ -224,7 +225,7 @@ protected void renderJSON(Object object) {
     context().rendered = true;
     context().response.setContentType(MimeType.JSON);
     try {
-      JacksonUtil.getSharedWriter().writeValue(writer(), object);
+      jsonMapper.writeValue(writer(), object);
     } catch (Exception e) {
       throw new WebAppException(e);
     }

@@ -19,11 +19,11 @@
 import java.io.IOException;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * A utility class providing methods for serializing and deserializing
@@ -38,8 +38,14 @@
 public class GenericObjectMapper {
   private static final byte[] EMPTY_BYTES = new byte[0];

-  public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class);
-  public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter();
+  public static final ObjectReader OBJECT_READER;
+  public static final ObjectWriter OBJECT_WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    OBJECT_READER = mapper.reader(Object.class);
+    OBJECT_WRITER = mapper.writer();
+  }

   /**
    * Serializes an Object into a byte array. Along with {@link #read(byte[])},

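The restored static block derives an ObjectReader and an ObjectWriter from a single mapper. Both are immutable, which is what makes publishing them as constants safe; a sketch (readerFor is the non-deprecated spelling of the reader(Class) call used above):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

public class ReaderWriterSketch {
  // ObjectReader and ObjectWriter are immutable snapshots of the mapper's
  // configuration, so they can be shared freely across threads.
  private static final ObjectReader READER;
  private static final ObjectWriter WRITER;

  static {
    ObjectMapper mapper = new ObjectMapper();
    READER = mapper.readerFor(Object.class);
    WRITER = mapper.writer();
  }

  public static byte[] write(Object o) throws Exception {
    return WRITER.writeValueAsBytes(o);
  }

  public static Object read(byte[] b) throws Exception {
    return READER.readValue(b);
  }
}
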
@@ -43,7 +43,6 @@
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord;
@@ -136,7 +135,7 @@ public class AuxServices extends AbstractService
     this.dirsHandler = nmContext.getLocalDirsHandler();
     this.delService = deletionService;
     this.userUGI = getRemoteUgi();
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     // Obtain services from configuration in init()
   }

@@ -28,11 +28,11 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@@ -58,8 +58,9 @@ public void initialize(Configuration conf) {
           + " we have to set the configuration:" +
           YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH);
     }
+    ObjectMapper mapper = new ObjectMapper();
     try {
-      networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile),
+      networkTagMapping = mapper.readValue(new File(mappingJsonFile),
           NetworkTagMapping.class);
     } catch (Exception e) {
       throw new YarnRuntimeException(e);

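initialize() above binds the mapping file straight to a typed object. A sketch of the same readValue(File, Class) overload with a stand-in POJO (Mapping and the file path are hypothetical, not the real NetworkTagMapping type):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;

public class FileBindSketch {
  // Hypothetical POJO standing in for NetworkTagMapping.
  public static class Mapping {
    public String network;
    public String tag;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // readValue(File, Class) opens, parses and closes the file itself.
    Mapping m = mapper.readValue(new File("/tmp/mapping.json"), Mapping.class);
    System.out.println(m.network + " -> " + m.tag);
  }
}
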
@@ -27,7 +27,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -92,7 +91,6 @@
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
 import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
-
 /**
  * <p>This class is an extension of {@link OCIContainerRuntime} that uses the
  * native {@code container-executor} binary via a
@@ -208,7 +206,7 @@ public void initialize(Configuration configuration, Context nmCtx)
     imageTagToManifestPlugin.init(conf);
     manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
     manifestToResourcesPlugin.init(conf);
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
     defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);
     allowedNetworks.clear();

@@ -26,7 +26,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;

 import java.io.BufferedReader;
@@ -43,6 +42,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -65,6 +65,7 @@ public class ImageTagToManifestPlugin extends AbstractService
     implements RuncImageTagToManifestPlugin {

   private Map<String, ImageManifest> manifestCache;
+  private ObjectMapper objMapper;
   private AtomicReference<Map<String, String>> localImageToHashCache =
       new AtomicReference<>(new HashMap<>());
   private AtomicReference<Map<String, String>> hdfsImageToHashCache =
@@ -106,7 +107,7 @@ public ImageManifest getManifestFromImageTag(String imageTag)
     }

     byte[] bytes = IOUtils.toByteArray(input);
-    manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class);
+    manifest = objMapper.readValue(bytes, ImageManifest.class);
     manifestCache.put(hash, manifest);

     return manifest;
@@ -278,6 +279,7 @@ protected void serviceInit(Configuration configuration) throws Exception {
         DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
     int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
         DEFAULT_NUM_MANIFESTS_TO_CACHE);
+    this.objMapper = new ObjectMapper();
     this.manifestCache = Collections.synchronizedMap(
         new LRUCache(numManifestsToCache, 0.75f));
@@ -313,7 +315,7 @@ protected void serviceStop() throws Exception {
   }

   private static class LRUCache extends LinkedHashMap<String, ImageManifest> {
-    private final int cacheSize;
+    private int cacheSize;

     LRUCache(int initialCapacity, float loadFactor) {
       super(initialCapacity, loadFactor, true);

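The LRUCache above passes accessOrder=true as the third LinkedHashMap constructor argument; the eviction hook itself (removeEldestEntry) sits outside the hunk. A self-contained sketch of the full idiom, assuming that is how the elided part works:

import java.util.LinkedHashMap;
import java.util.Map;

// With accessOrder=true, iteration runs from least- to most-recently used,
// and overriding removeEldestEntry() evicts the stalest entry on insert.
public class LruSketch<K, V> extends LinkedHashMap<K, V> {
  private final int cacheSize;

  public LruSketch(int cacheSize) {
    super(16, 0.75f, true);
    this.cacheSize = cacheSize;
  }

  @Override
  protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
    return size() > cacheSize;
  }
}
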
@@ -20,10 +20,10 @@
 import org.apache.hadoop.classification.VisibleForTesting;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -105,7 +105,8 @@ private void loadProfiles() throws IOException {
         resourcesFile = tmp.getPath();
       }
     }
-    Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class);
+    ObjectMapper mapper = new ObjectMapper();
+    Map data = mapper.readValue(new File(resourcesFile), Map.class);
     Iterator iterator = data.entrySet().iterator();
     while (iterator.hasNext()) {
       Map.Entry entry = (Map.Entry) iterator.next();

@@ -27,7 +27,6 @@
 import java.util.List;

 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions;
@@ -44,6 +43,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;

 public class MappingRuleCreator {
@@ -58,12 +58,14 @@ public MappingRulesDescription getMappingRulesFromJsonFile(String filePath)
   MappingRulesDescription getMappingRulesFromJson(byte[] contents)
       throws IOException {
-    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
+    ObjectMapper objectMapper = new ObjectMapper();
+    return objectMapper.readValue(contents, MappingRulesDescription.class);
   }

   MappingRulesDescription getMappingRulesFromJson(String contents)
       throws IOException {
-    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
+    ObjectMapper objectMapper = new ObjectMapper();
+    return objectMapper.readValue(contents, MappingRulesDescription.class);
   }

   public List<MappingRule> getMappingRulesFromFile(String jsonPath)

@@ -21,7 +21,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath;
@@ -53,11 +52,9 @@ public class LegacyMappingRuleToJson {
   public static final String JSON_NODE_MATCHES = "matches";

   /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
+   * Our internal object mapper, used to create JSON nodes.
    */
-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private ObjectMapper objectMapper = new ObjectMapper();

   /**
    * Collection to store the legacy group mapping rule strings.
@@ -141,8 +138,8 @@ public LegacyMappingRuleToJson setAppNameMappingRules(
    */
   public String convert() {
     //creating the basic JSON config structure
-    ObjectNode rootNode = OBJECT_MAPPER.createObjectNode();
-    ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode();
+    ObjectNode rootNode = objectMapper.createObjectNode();
+    ArrayNode rulesNode = objectMapper.createArrayNode();
     rootNode.set("rules", rulesNode);

     //Processing and adding all the user group mapping rules
@@ -161,7 +158,7 @@ public String convert() {
     }

     try {
-      return OBJECT_MAPPER
+      return objectMapper
           .writerWithDefaultPrettyPrinter()
           .writeValueAsString(rootNode);
     } catch (JsonProcessingException e) {
@@ -249,7 +246,7 @@ private String[] splitRule(String rule, int expectedParts) {
    * @return The object node with the preset fields
    */
   private ObjectNode createDefaultRuleNode(String type) {
-    return OBJECT_MAPPER
+    return objectMapper
        .createObjectNode()
        .put("type", type)
        //All legacy rule fallback to place to default

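convert() builds the JSON document through Jackson's tree model rather than string concatenation. A compact, runnable sketch of the same ObjectNode/ArrayNode flow (the rule field values are invented):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class TreeBuildSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper objectMapper = new ObjectMapper();
    ObjectNode rootNode = objectMapper.createObjectNode();
    ArrayNode rulesNode = objectMapper.createArrayNode();
    rootNode.set("rules", rulesNode);

    // Each rule is an object node appended to the "rules" array.
    rulesNode.add(objectMapper.createObjectNode()
        .put("type", "user")
        .put("policy", "custom"));

    System.out.println(objectMapper
        .writerWithDefaultPrettyPrinter()
        .writeValueAsString(rootNode));
  }
}
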
@@ -32,7 +32,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -56,6 +55,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -327,14 +327,14 @@ private void performRuleConversion(FairScheduler fs)
       placementConverter.convertPlacementPolicy(placementManager,
           ruleHandler, capacitySchedulerConfig, usePercentages);

-      final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       // close output stream if we write to a file, leave it open otherwise
       if (!consoleMode && rulesToFile) {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true);
       } else {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
       }
-      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+      ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());

       if (consoleMode && rulesToFile) {
         System.out.println("======= " + MAPPING_RULES_JSON + " =======");

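Two details in the hunk above are easy to miss: AUTO_CLOSE_TARGET decides whether a write closes the supplied stream, and mapper.writer(new DefaultPrettyPrinter()) produces the same output as writerWithDefaultPrettyPrinter(). A sketch:

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import java.util.Collections;

public class AutoCloseSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // With AUTO_CLOSE_TARGET disabled, writing leaves the target stream
    // open (important here, since the target is System.out); enabled, the
    // stream is closed after the write, which suits one-shot file output.
    mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);

    // Equivalent to mapper.writerWithDefaultPrettyPrinter().
    ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());
    writer.writeValue(System.out, Collections.singletonMap("k", "v"));
  }
}
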
@@ -42,7 +42,6 @@
 import org.apache.hadoop.service.ServiceOperations;
 import org.apache.hadoop.ipc.CallerContext;
 import org.apache.hadoop.util.ApplicationClassLoader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -321,7 +320,7 @@ protected void serviceStart() throws Exception {
       }
     }

-    objMapper = JacksonUtil.createBasicObjectMapper();
+    objMapper = new ObjectMapper();
    objMapper.setAnnotationIntrospector(
        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
    jsonFactory = new MappingJsonFactory(objMapper);

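serviceStart() above configures the mapper to honour JAXB annotations and then wraps it in a MappingJsonFactory so generators created later carry the same databinding. A sketch under that assumption (requires the jackson-module-jaxb-annotations dependency):

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
import java.util.Collections;

public class JaxbMapperSketch {
  static ObjectMapper createObjectMapper() {
    ObjectMapper mapper = new ObjectMapper();
    // Honour JAXB annotations (@XmlElement and friends) on model classes
    // and drop null fields from the output.
    mapper.setAnnotationIntrospector(
        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    return mapper;
  }

  public static void main(String[] args) throws Exception {
    // Generators from a MappingJsonFactory already know the mapper, so
    // writeObject() works without attaching a codec by hand.
    MappingJsonFactory factory = new MappingJsonFactory(createObjectMapper());
    JsonGenerator gen = factory.createGenerator(System.out);
    gen.writeObject(Collections.singletonMap("state", "STARTED"));
    gen.close();
  }
}
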
@@ -18,13 +18,13 @@
 package org.apache.hadoop.yarn.server.timeline;

+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
@@ -298,6 +298,7 @@ public void close() throws IOException {
       }
     };
   }
+  static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

   @SuppressWarnings("unchecked")
   private V getEntityForKey(byte[] key) throws IOException {
@@ -305,7 +306,7 @@ private V getEntityForKey(byte[] key) throws IOException {
     if (resultRaw == null) {
       return null;
     }
-    return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class);
+    return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class);
   }

   private byte[] getStartTimeKey(K entityId) {

@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.timeline;

 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -30,7 +31,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@@ -108,7 +108,7 @@ static FSDataOutputStream createLogFile(Path logPath, FileSystem fs)
   }

   static ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -230,9 +230,10 @@ static TimelineEvent createEvent(long timestamp, String type, Map<String,
   static void writeEntities(TimelineEntities entities, Path logPath,
       FileSystem fs) throws IOException {
     FSDataOutputStream outStream = createLogFile(logPath, fs);
-    ObjectMapper objMapper = createObjectMapper();
-    JsonGenerator jsonGenerator = objMapper.createGenerator((OutputStream)outStream);
+    JsonGenerator jsonGenerator
+        = new JsonFactory().createGenerator((OutputStream)outStream);
     jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+    ObjectMapper objMapper = createObjectMapper();
     for (TimelineEntity entity : entities.getEntities()) {
       objMapper.writeValue(jsonGenerator, entity);
     }

@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.hadoop.util.JacksonUtil;

 /**
  * A simple util class for Json SerDe.
@@ -32,7 +31,7 @@ public final class JsonUtils {
   private JsonUtils(){}

-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

   static {
     OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth; import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@ -105,10 +104,11 @@ String getRootPath() {
return rootPath.toString(); return rootPath.toString();
} }
private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper(); private static ObjectMapper mapper;
static { static {
YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER); mapper = new ObjectMapper();
YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
} }
/** /**
@ -127,7 +127,7 @@ String getRootPath() {
public static <T> T getTimelineRecordFromJSON( public static <T> T getTimelineRecordFromJSON(
String jsonString, Class<T> clazz) String jsonString, Class<T> clazz)
throws JsonGenerationException, JsonMappingException, IOException { throws JsonGenerationException, JsonMappingException, IOException {
return OBJECT_MAPPER.readValue(jsonString, clazz); return mapper.readValue(jsonString, clazz);
} }
private static void fillFields(TimelineEntity finalEntity, private static void fillFields(TimelineEntity finalEntity,