Revert "HADOOP-19231. Add JacksonUtil to manage Jackson classes (#6953)"

This reverts commit fa9bb0d1ac.
This commit is contained in:
Ayush Saxena 2024-08-29 00:29:19 +05:30
parent 0aab1a2976
commit 0837c84a9f
No known key found for this signature in database
GPG Key ID: D09AE71061AB564D
71 changed files with 296 additions and 392 deletions

View File

@ -22,6 +22,7 @@
import com.ctc.wstx.io.StreamBootstrapper;
import com.ctc.wstx.io.SystemId;
import com.ctc.wstx.stax.WstxInputFactory;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import java.io.BufferedInputStream;
@ -100,7 +101,6 @@
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
import org.apache.hadoop.util.ConfigurationHelper;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringInterner;
@ -3792,7 +3792,8 @@ public static void dumpConfiguration(Configuration config,
throw new IllegalArgumentException("Property " +
propertyName + " not found");
} else {
JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
JsonFactory dumpFactory = new JsonFactory();
JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
dumpGenerator.writeStartObject();
dumpGenerator.writeFieldName("property");
appendJSONProperty(dumpGenerator, config, propertyName,
@ -3830,7 +3831,8 @@ public static void dumpConfiguration(Configuration config,
*/
public static void dumpConfiguration(Configuration config,
Writer out) throws IOException {
JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
JsonFactory dumpFactory = new JsonFactory();
JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
dumpGenerator.writeStartObject();
dumpGenerator.writeFieldName("properties");
dumpGenerator.writeStartArray();

View File

@ -42,7 +42,6 @@
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.util.HttpExceptionUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.JsonSerialization;
import org.apache.hadoop.util.KMSUtil;
import org.apache.http.client.utils.URIBuilder;
@ -80,6 +79,7 @@
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@ -595,10 +595,11 @@ private <T> T call(HttpURLConnection conn, Object jsonOutput,
&& conn.getContentType().trim().toLowerCase()
.startsWith(APPLICATION_JSON_MIME)
&& klass != null) {
ObjectMapper mapper = new ObjectMapper();
InputStream is = null;
try {
is = conn.getInputStream();
ret = JacksonUtil.getSharedReader().readValue(is, klass);
ret = mapper.readValue(is, klass);
} finally {
IOUtils.closeStream(is);
}

View File

@ -38,10 +38,10 @@
import javax.management.ObjectName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray;
import org.apache.commons.lang3.exception.ExceptionUtils;
@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler,
public static final Logger LOG =
LoggerFactory.getLogger(DecayRpcScheduler.class);
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
private static final ObjectWriter WRITER = new ObjectMapper().writer();
// Track the decayed and raw (no decay) number of calls for each schedulable
// identity from all previous decay windows: idx 0 for decayed call cost and

View File

@ -121,7 +121,6 @@
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.ProtoUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Time;
@ -131,6 +130,7 @@
import org.apache.hadoop.tracing.TraceScope;
import org.apache.hadoop.tracing.Tracer;
import org.apache.hadoop.tracing.TraceUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@ -3843,8 +3843,9 @@ public int getNumOpenConnections() {
* @return Get the NumOpenConnections/User.
*/
public String getNumOpenConnectionsPerUser() {
ObjectMapper mapper = new ObjectMapper();
try {
return JacksonUtil.getSharedWriter()
return mapper
.writeValueAsString(connectionManager.getUserToConnectionsMap());
} catch (IOException ignored) {
}

View File

@ -43,13 +43,13 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.util.JacksonUtil;
/*
* This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@ -134,6 +134,11 @@ public class JMXJsonServlet extends HttpServlet {
*/
protected transient MBeanServer mBeanServer;
/**
* Json Factory to create Json generators for write objects in json format
*/
protected transient JsonFactory jsonFactory;
/**
* Initialize this servlet.
*/
@ -141,6 +146,7 @@ public class JMXJsonServlet extends HttpServlet {
public void init() throws ServletException {
// Retrieve the MBean server
mBeanServer = ManagementFactory.getPlatformMBeanServer();
jsonFactory = new JsonFactory();
}
protected boolean isInstrumentationAccessAllowed(HttpServletRequest request,
@ -181,7 +187,7 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) {
response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
jg = JacksonUtil.getSharedWriter().createGenerator(writer);
jg = jsonFactory.createGenerator(writer);
jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
jg.useDefaultPrettyPrinter();
jg.writeStartObject();

View File

@ -21,8 +21,8 @@
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.JacksonUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -46,7 +46,8 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
private final MetricsCollector parent;
private Map<String, Object> innerMetrics = new LinkedHashMap<>();
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
private static final ObjectWriter WRITER =
new ObjectMapper().writer();
/**
* Build an instance.

View File

@ -46,7 +46,6 @@
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.util.HttpExceptionUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -166,7 +165,7 @@ public void initTokenManager(Properties config) {
@VisibleForTesting
public void initJsonFactory(Properties config) {
boolean hasFeature = false;
JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory();
JsonFactory tmpJsonFactory = new JsonFactory();
for (Map.Entry entry : config.entrySet()) {
String key = (String)entry.getKey();
@ -336,7 +335,7 @@ public boolean managementOperation(AuthenticationToken token,
if (map != null) {
response.setContentType(MediaType.APPLICATION_JSON);
Writer writer = response.getWriter();
ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory);
ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
jsonMapper.writeValue(writer, map);
writer.write(ENTER);
writer.flush();

View File

@ -1,123 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.json.JsonMapper;
import org.apache.hadoop.classification.InterfaceAudience.Private;
/**
 * Central place for creating Jackson objects used across Hadoop.
 *
 * <p>Factories and mappers are handed out as fresh instances because they are
 * mutable; readers and writers are immutable in Jackson and therefore shared.
 */
@Private
public final class JacksonUtil {

  // Backing mapper for the shared reader/writer instances below.
  // Never exposed directly: callers could reconfigure it.
  private static final ObjectMapper MAPPER = createBasicObjectMapper();

  // ObjectReader/ObjectWriter are immutable per Jackson's documentation,
  // so these singletons are safe to share between threads.
  private static final ObjectReader READER = MAPPER.reader();
  private static final ObjectWriter WRITER = MAPPER.writer();
  private static final ObjectWriter PRETTY_WRITER =
      MAPPER.writerWithDefaultPrettyPrinter();

  private JacksonUtil() {
    // static-only utility class; no instances
  }

  /**
   * Creates a new {@link JsonFactory} instance with basic configuration.
   *
   * @return a {@link JsonFactory} with basic configuration
   */
  public static JsonFactory createBasicJsonFactory() {
    // A new instance every call: factories are mutable, so a shared one
    // could be reconfigured by a caller and affect everyone else.
    return new JsonFactory();
  }

  /**
   * Creates a new {@link ObjectMapper} instance with basic configuration.
   *
   * @return an {@link ObjectMapper} with basic configuration
   */
  public static ObjectMapper createBasicObjectMapper() {
    // Same reasoning as createBasicJsonFactory(): mappers are mutable,
    // so each caller gets its own instance.
    return JsonMapper.builder(createBasicJsonFactory()).build();
  }

  /**
   * Creates a new {@link ObjectMapper} instance based on the configuration
   * in the input {@link JsonFactory}.
   *
   * @param jsonFactory a pre-configured {@link JsonFactory}
   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
   */
  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
    return JsonMapper.builder(jsonFactory).build();
  }

  /**
   * Returns a shared {@link ObjectReader} instance with basic configuration.
   *
   * @return a shared {@link ObjectReader} instance with basic configuration
   */
  public static ObjectReader getSharedReader() {
    return READER;
  }

  /**
   * Returns an {@link ObjectReader} for the given type instance with basic configuration.
   *
   * @param type the class that the reader has to support
   * @return an {@link ObjectReader} instance with basic configuration
   */
  public static ObjectReader createBasicReaderFor(Class<?> type) {
    return MAPPER.readerFor(type);
  }

  /**
   * Returns a shared {@link ObjectWriter} instance with basic configuration.
   *
   * @return a shared {@link ObjectWriter} instance with basic configuration
   */
  public static ObjectWriter getSharedWriter() {
    return WRITER;
  }

  /**
   * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration.
   *
   * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration
   */
  public static ObjectWriter getSharedWriterWithPrettyPrint() {
    return PRETTY_WRITER;
  }

  /**
   * Returns an {@link ObjectWriter} for the given type instance with basic configuration.
   *
   * @param type the class that the writer has to support
   * @return an {@link ObjectWriter} instance with basic configuration
   */
  public static ObjectWriter createBasicWriterFor(Class<?> type) {
    return MAPPER.writerFor(type);
  }
}

View File

@ -76,8 +76,11 @@ public class JsonSerialization<T> {
private final Class<T> classType;
private final ObjectMapper mapper;
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
private static final ObjectReader MAP_READER =
new ObjectMapper().readerFor(Map.class);
/**
* @return an ObjectWriter which pretty-prints its output
@ -103,7 +106,7 @@ public JsonSerialization(Class<T> classType,
boolean failOnUnknownProperties, boolean pretty) {
Preconditions.checkArgument(classType != null, "null classType");
this.classType = classType;
this.mapper = JacksonUtil.createBasicObjectMapper();
this.mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
failOnUnknownProperties);
mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty);

View File

@ -17,8 +17,9 @@
*/
package org.apache.hadoop.crypto.key.kms.server;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.JacksonUtil;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
@ -37,6 +38,7 @@
@Consumes(MediaType.APPLICATION_JSON)
@InterfaceAudience.Private
public class KMSJSONReader implements MessageBodyReader<Object> {
private static final ObjectMapper MAPPER = new ObjectMapper();
@Override
public boolean isReadable(Class<?> type, Type genericType,
@ -50,6 +52,6 @@ public Object readFrom(Class<Object> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
throws IOException, WebApplicationException {
return JacksonUtil.getSharedReader().readValue(entityStream, type);
return MAPPER.readValue(entityStream, type);
}
}

View File

@ -20,8 +20,8 @@
package org.apache.hadoop.hdfs.server.datanode;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -35,8 +35,9 @@
@InterfaceStability.Unstable
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public class DiskBalancerWorkItem {
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final ObjectReader READER =
JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class);
new ObjectMapper().readerFor(DiskBalancerWorkItem.class);
private long startTime;
private long secondsElapsed;
@ -172,7 +173,7 @@ public void incBlocksCopied() {
* @throws IOException
*/
public String toJson() throws IOException {
return JacksonUtil.getSharedWriter().writeValueAsString(this);
return MAPPER.writeValueAsString(this);
}
/**

View File

@ -23,7 +23,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -40,13 +39,14 @@
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class DiskBalancerWorkStatus {
private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
private static final ObjectReader READER_WORKSTATUS =
MAPPER.readerFor(DiskBalancerWorkStatus.class);
private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor(
defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class));
new ObjectMapper().readerFor(DiskBalancerWorkStatus.class);
private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
.readerFor(defaultInstance().constructCollectionType(List.class,
DiskBalancerWorkEntry.class));
private final List<DiskBalancerWorkEntry> currentState;
private Result result;

View File

@ -18,7 +18,9 @@
package org.apache.hadoop.hdfs.util;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import java.io.File;
@ -40,7 +42,6 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
import org.apache.hadoop.util.JacksonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -82,6 +83,7 @@ private CombinedHostsFileReader() {
public static DatanodeAdminProperties[]
readFile(final String hostsFilePath) throws IOException {
DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
ObjectMapper objectMapper = new ObjectMapper();
File hostFile = new File(hostsFilePath);
boolean tryOldFormat = false;
@ -89,8 +91,7 @@ private CombinedHostsFileReader() {
try (Reader input =
new InputStreamReader(
Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
allDNs = JacksonUtil.getSharedReader()
.readValue(input, DatanodeAdminProperties[].class);
allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
} catch (JsonMappingException jme) {
// The old format doesn't have json top-level token to enclose
// the array.
@ -102,12 +103,15 @@ private CombinedHostsFileReader() {
}
if (tryOldFormat) {
ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class);
ObjectReader objectReader =
objectMapper.readerFor(DatanodeAdminProperties.class);
JsonFactory jsonFactory = new JsonFactory();
List<DatanodeAdminProperties> all = new ArrayList<>();
try (Reader input =
new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
StandardCharsets.UTF_8)) {
Iterator<DatanodeAdminProperties> iterator = objectReader.readValues(input);
Iterator<DatanodeAdminProperties> iterator =
objectReader.readValues(jsonFactory.createParser(input));
while (iterator.hasNext()) {
DatanodeAdminProperties properties = iterator.next();
all.add(properties);

View File

@ -26,11 +26,11 @@
import java.nio.file.Paths;
import java.util.Set;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
import org.apache.hadoop.util.JacksonUtil;
/**
* Writer support for JSON-based datanode configuration, an alternative format
@ -59,10 +59,12 @@ private CombinedHostsFileWriter() {
*/
public static void writeFile(final String hostsFile,
final Set<DatanodeAdminProperties> allDNs) throws IOException {
final ObjectMapper objectMapper = new ObjectMapper();
try (Writer output =
new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
StandardCharsets.UTF_8)) {
JacksonUtil.getSharedWriter().writeValue(output, allDNs);
objectMapper.writeValue(output, allDNs);
}
}
}

View File

@ -17,12 +17,12 @@
*/
package org.apache.hadoop.hdfs.web;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.fs.ContentSummary;
@ -654,7 +654,7 @@ static List<String> toXAttrNames(final Map<?, ?> json)
}
final String namesInJson = (String) json.get("XAttrNames");
ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class);
ObjectReader reader = new ObjectMapper().readerFor(List.class);
final List<Object> xattrs = reader.readValue(namesInJson);
final List<String> names =
Lists.newArrayListWithCapacity(json.keySet().size());

View File

@ -71,7 +71,6 @@
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
import org.apache.hadoop.util.HttpExceptionUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
@ -1819,7 +1818,7 @@ public Collection<FileStatus> getTrashRoots(boolean allUsers) {
@VisibleForTesting
static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
String.class, BlockLocation[].class);
MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,

View File

@ -21,6 +21,7 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
@ -31,7 +32,6 @@
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Timer;
import org.slf4j.Logger;
@ -71,7 +71,7 @@ public class SlowDiskTracker {
/**
* ObjectWriter to convert JSON reports to String.
*/
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
private static final ObjectWriter WRITER = new ObjectMapper().writer();
/**
* Number of disks to include in JSON report per operation. We will return

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.hdfs.server.blockmanagement;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
@ -29,7 +30,6 @@
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics;
import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -75,8 +75,7 @@ public class SlowPeerTracker {
/**
* ObjectWriter to convert JSON reports to String.
*/
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
private static final ObjectWriter WRITER = new ObjectMapper().writer();
/**
* Number of nodes to include in JSON report. We will return nodes with
* the highest number of votes from peers.

View File

@ -79,18 +79,18 @@
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
@ -103,9 +103,10 @@
public class FsVolumeImpl implements FsVolumeSpi {
public static final Logger LOG =
LoggerFactory.getLogger(FsVolumeImpl.class);
private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
private static final ObjectReader READER =
JacksonUtil.createBasicReaderFor(BlockIteratorState.class);
new ObjectMapper().readerFor(BlockIteratorState.class);
private final FsDatasetImpl dataset;
private final String storageID;

View File

@ -32,6 +32,7 @@
import java.util.concurrent.atomic.AtomicLong;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.InterfaceAudience;
@ -59,7 +60,6 @@
import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
@ -369,7 +369,7 @@ public void releaseReservedSpace(long bytesToRelease) {
}
private static final ObjectWriter WRITER =
JacksonUtil.getSharedWriterWithPrettyPrint();
new ObjectMapper().writerWithDefaultPrettyPrinter();
private static class ProvidedBlockIteratorState {
ProvidedBlockIteratorState() {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.server.diskbalancer.command;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.commons.cli.CommandLine;
@ -46,7 +47,6 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.HostsFileReader;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -77,7 +77,8 @@
* Common interface for command handling.
*/
public abstract class Command extends Configured implements Closeable {
private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class);
private static final ObjectReader READER =
new ObjectMapper().readerFor(HashMap.class);
static final Logger LOG = LoggerFactory.getLogger(Command.class);
private Map<String, String> validArgs = new HashMap<>();
private URI clusterURI;

View File

@ -17,14 +17,15 @@
package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
.DiskBalancerDataNode;
import java.io.File;
import java.net.URL;
@ -37,7 +38,7 @@ public class JsonNodeConnector implements ClusterConnector {
private static final Logger LOG =
LoggerFactory.getLogger(JsonNodeConnector.class);
private static final ObjectReader READER =
JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
new ObjectMapper().readerFor(DiskBalancerCluster.class);
private final URL clusterURI;
/**

View File

@ -19,7 +19,9 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.util.Preconditions;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
@ -29,8 +31,6 @@
import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
import org.apache.hadoop.hdfs.web.JsonUtil;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import java.io.File;
import java.io.IOException;
@ -73,7 +73,7 @@ public class DiskBalancerCluster {
private static final Logger LOG =
LoggerFactory.getLogger(DiskBalancerCluster.class);
private static final ObjectReader READER =
JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
new ObjectMapper().readerFor(DiskBalancerCluster.class);
private final Set<String> exclusionList;
private final Set<String> inclusionList;
private ClusterConnector clusterConnector;

View File

@ -19,10 +19,10 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.hdfs.web.JsonUtil;
import org.apache.hadoop.util.JacksonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -34,7 +34,7 @@
@JsonIgnoreProperties(ignoreUnknown = true)
public class DiskBalancerVolume {
private static final ObjectReader READER =
JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class);
new ObjectMapper().readerFor(DiskBalancerVolume.class);
private static final Logger LOG =
LoggerFactory.getLogger(DiskBalancerVolume.class);

View File

@ -18,9 +18,9 @@
package org.apache.hadoop.hdfs.server.diskbalancer.planner;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
import java.io.IOException;
@ -39,8 +39,10 @@ public class NodePlan {
private int port;
private long timeStamp;
private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class);
private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class);
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
private static final ObjectWriter WRITER = MAPPER.writerFor(
MAPPER.constructType(NodePlan.class));
/**
* returns timestamp when this plan was created.
*

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@ -25,7 +26,6 @@
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import javax.servlet.ServletContext;
@ -123,7 +123,8 @@ protected void printTopology(PrintStream stream, List<Node> leaves,
protected void printJsonFormat(PrintStream stream, Map<String,
TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream);
JsonFactory dumpFactory = new JsonFactory();
JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
dumpGenerator.writeStartArray();
for(String r : racks) {

View File

@ -21,6 +21,7 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
@ -28,7 +29,6 @@
import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.classification.InterfaceAudience;
@ -61,7 +61,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
getServletContext());
StartupProgressView view = prog.createView();
JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter());
JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
try {
json.writeStartObject();
json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());

View File

@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.web;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ContentSummary;
@ -39,12 +38,13 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.*;
@ -52,11 +52,11 @@
public class JsonUtil {
private static final Object[] EMPTY_OBJECT_ARRAY = {};
// Reuse ObjectWriter instance for improving performance.
// ObjectWriter is thread safe as long as we always configure instance
// Reuse ObjectMapper instance for improving performance.
// ObjectMapper is thread safe as long as we always configure instance
// before use. We don't have a re-entrant call pattern in WebHDFS,
// so we just need to worry about thread-safety.
private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();
private static final ObjectMapper MAPPER = new ObjectMapper();
/** Convert a token object to a Json string. */
public static String toJsonString(final Token<? extends TokenIdentifier> token
@ -93,7 +93,7 @@ public static String toJsonString(final String key, final Object value) {
final Map<String, Object> m = new TreeMap<String, Object>();
m.put(key, value);
try {
return SHARED_WRITER.writeValueAsString(m);
return MAPPER.writeValueAsString(m);
} catch (IOException ignored) {
}
return null;
@ -113,7 +113,7 @@ public static String toJsonString(final HdfsFileStatus status,
final Map<String, Object> m = toJsonMap(status);
try {
return includeType ?
toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m);
toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m);
} catch (IOException ignored) {
}
return null;
@ -453,7 +453,7 @@ public static String toJsonString(final AclStatus status) {
finalMap.put(AclStatus.class.getSimpleName(), m);
try {
return SHARED_WRITER.writeValueAsString(finalMap);
return MAPPER.writeValueAsString(finalMap);
} catch (IOException ignored) {
}
return null;
@ -491,7 +491,7 @@ public static String toJsonString(final List<XAttr> xAttrs,
final XAttrCodec encoding) throws IOException {
final Map<String, Object> finalMap = new TreeMap<String, Object>();
finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
return SHARED_WRITER.writeValueAsString(finalMap);
return MAPPER.writeValueAsString(finalMap);
}
public static String toJsonString(final List<XAttr> xAttrs)
@ -500,14 +500,14 @@ public static String toJsonString(final List<XAttr> xAttrs)
for (XAttr xAttr : xAttrs) {
names.add(XAttrHelper.getPrefixedName(xAttr));
}
String ret = SHARED_WRITER.writeValueAsString(names);
String ret = MAPPER.writeValueAsString(names);
final Map<String, Object> finalMap = new TreeMap<String, Object>();
finalMap.put("XAttrNames", ret);
return SHARED_WRITER.writeValueAsString(finalMap);
return MAPPER.writeValueAsString(finalMap);
}
public static String toJsonString(Object obj) throws IOException {
return SHARED_WRITER.writeValueAsString(obj);
return MAPPER.writeValueAsString(obj);
}
public static String toJsonString(BlockStoragePolicy[] storagePolicies) {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.hadoop.classification.InterfaceAudience;
@ -27,7 +28,6 @@
import org.apache.hadoop.mapreduce.QueueState;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -531,7 +531,8 @@ static void dumpConfiguration(Writer out, String configFile,
return;
}
JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
JsonFactory dumpFactory = new JsonFactory();
JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
QueueConfigurationParser parser;
boolean aclsEnabled = false;
if (conf != null) {

View File

@ -28,7 +28,6 @@
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
/**
@ -42,7 +41,7 @@ private JobHistoryEventUtils() {
public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
public static JsonNode countersToJSON(Counters counters) {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
ArrayNode nodes = mapper.createArrayNode();
if (counters != null) {
for (CounterGroup counterGroup : counters) {

View File

@ -22,6 +22,7 @@
import java.nio.charset.StandardCharsets;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -29,7 +30,6 @@
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.S3AUtils;
import org.apache.hadoop.util.JacksonUtil;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;
@ -91,8 +91,8 @@ public static String getS3EncryptionContextBase64Encoded(
if (encryptionContextMap.isEmpty()) {
return "";
}
final String encryptionContextJson = JacksonUtil.getSharedWriter()
.writeValueAsString(encryptionContextMap);
final String encryptionContextJson = new ObjectMapper().writeValueAsString(
encryptionContextMap);
return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
if (propagateExceptions) {

View File

@ -84,7 +84,6 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.LambdaUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.Time;
@ -97,6 +96,7 @@
import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*;
import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.VisibleForTesting;
import com.microsoft.azure.storage.StorageException;
@ -127,7 +127,7 @@ public static class FolderRenamePending {
private static final int FORMATTING_BUFFER = 10000;
private boolean committed;
public static final String SUFFIX = "-RenamePending.json";
private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper()
private static final ObjectReader READER = new ObjectMapper()
.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
.readerFor(JsonNode.class);

View File

@ -24,11 +24,11 @@
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azure.security.Constants;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.http.NameValuePair;
@ -40,7 +40,7 @@
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl {
public static final Logger LOG =
LoggerFactory.getLogger(AzureNativeFileSystemStore.class);
private static final ObjectReader RESPONSE_READER = JacksonUtil
.createBasicReaderFor(RemoteSASKeyGenerationResponse.class);
private static final ObjectReader RESPONSE_READER = new ObjectMapper()
.readerFor(RemoteSASKeyGenerationResponse.class);
/**
* Configuration parameter name expected in the Configuration

View File

@ -20,6 +20,7 @@
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.commons.lang3.StringUtils;
@ -28,14 +29,13 @@
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
@ -49,8 +49,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
public static final Logger LOG = LoggerFactory
.getLogger(RemoteWasbAuthorizerImpl.class);
private static final ObjectReader RESPONSE_READER = JacksonUtil
.createBasicReaderFor(RemoteWasbAuthorizerResponse.class);
private static final ObjectReader RESPONSE_READER = new ObjectMapper()
.readerFor(RemoteWasbAuthorizerResponse.class);
/**
* Configuration parameter name expected in the Configuration object to
@ -176,7 +176,7 @@ private boolean authorizeInternal(String wasbAbsolutePath, String accessType, St
uriBuilder
.addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath);
uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType);
if (StringUtils.isNotEmpty(resourceOwner)) {
if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) {
uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME,
resourceOwner);
}

View File

@ -29,6 +29,9 @@
import java.util.Hashtable;
import java.util.Map;
import org.apache.hadoop.util.Preconditions;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import org.slf4j.Logger;
@ -39,8 +42,6 @@
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils;
import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Preconditions;
/**
* This class provides convenience methods to obtain AAD tokens.
@ -492,7 +493,8 @@ private static AzureADToken parseTokenFromStream(
int expiryPeriodInSecs = 0;
long expiresOnInSecs = -1;
JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream);
JsonFactory jf = new JsonFactory();
JsonParser jp = jf.createParser(httpResponseStream);
String fieldName, fieldValue;
jp.nextToken();
while (jp.hasCurrentToken()) {

View File

@ -30,6 +30,7 @@
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -39,7 +40,6 @@
import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable;
import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
import org.apache.hadoop.util.JacksonUtil;
/**
* Base Http operation class for orchestrating server IO calls. Child classes would
@ -447,7 +447,7 @@ private void processStorageErrorResponse() {
if (stream == null) {
return;
}
JsonFactory jf = JacksonUtil.createBasicJsonFactory();
JsonFactory jf = new JsonFactory();
try (JsonParser jp = jf.createParser(stream)) {
String fieldName, fieldValue;
jp.nextToken(); // START_OBJECT - {
@ -509,7 +509,8 @@ private void parseListFilesResponse(final InputStream stream)
}
try {
this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream,
final ObjectMapper objectMapper = new ObjectMapper();
this.listResultSchema = objectMapper.readValue(stream,
ListResultSchema.class);
} catch (IOException ex) {
log.error("Unable to deserialize list results", ex);

View File

@ -51,7 +51,6 @@
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@ -485,7 +484,7 @@ static Set<String> parseStaleDataNodeList(String liveNodeJsonString,
final int blockThreshold, final Logger log) throws IOException {
final Set<String> dataNodesToReport = new HashSet<>();
JsonFactory fac = JacksonUtil.createBasicJsonFactory();
JsonFactory fac = new JsonFactory();
JsonParser parser = fac.createParser(IOUtils
.toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
@ -555,7 +554,7 @@ static String fetchNameNodeJMXValue(Properties nameNodeProperties,
"Unable to retrieve JMX: " + conn.getResponseMessage());
}
InputStream in = conn.getInputStream();
JsonFactory fac = JacksonUtil.createBasicJsonFactory();
JsonFactory fac = new JsonFactory();
JsonParser parser = fac.createParser(in);
if (parser.nextToken() != JsonToken.START_OBJECT
|| parser.nextToken() != JsonToken.FIELD_NAME

View File

@ -22,6 +22,7 @@
import java.io.OutputStream;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -35,7 +36,6 @@
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.mapreduce.ID;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.tools.rumen.datatypes.*;
@ -55,7 +55,8 @@ public class Anonymizer extends Configured implements Tool {
private StatePool statePool;
private ObjectMapper outMapper = null;
private JsonFactory outFactory = null;
private void initialize(String[] args) throws Exception {
try {
for (int i = 0; i < args.length; ++i) {
@ -84,7 +85,7 @@ private void initialize(String[] args) throws Exception {
// initialize the state manager after the anonymizers are registered
statePool.initialize(getConf());
outMapper = JacksonUtil.createBasicObjectMapper();
outMapper = new ObjectMapper();
// define a module
SimpleModule module = new SimpleModule(
"Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
@ -103,6 +104,8 @@ private void initialize(String[] args) throws Exception {
// register the module with the object-mapper
outMapper.registerModule(module);
outFactory = outMapper.getFactory();
}
// anonymize the job trace file
@ -188,7 +191,7 @@ private JsonGenerator createJsonGenerator(Configuration conf, Path path)
}
JsonGenerator outGen =
outMapper.createGenerator(output, JsonEncoding.UTF8);
outFactory.createGenerator(output, JsonEncoding.UTF8);
outGen.useDefaultPrettyPrinter();
return outGen;

View File

@ -26,7 +26,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.JacksonUtil;
/**
* A simple wrapper for parsing JSON-encoded data using ObjectMapper.
@ -49,10 +48,10 @@ class JsonObjectMapperParser<T> implements Closeable {
*/
public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
Configuration conf) throws IOException {
mapper = JacksonUtil.createBasicObjectMapper();
mapper = new ObjectMapper();
this.clazz = clazz;
InputStream input = new PossiblyDecompressedInputStream(path, conf);
jsonParser = mapper.createParser(input);
jsonParser = mapper.getFactory().createParser(input);
}
/**
@ -63,9 +62,9 @@ public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
*/
public JsonObjectMapperParser(InputStream input, Class<? extends T> clazz)
throws IOException {
mapper = JacksonUtil.createBasicObjectMapper();
mapper = new ObjectMapper();
this.clazz = clazz;
jsonParser = mapper.createParser(input);
jsonParser = mapper.getFactory().createParser(input);
}
/**

View File

@ -30,7 +30,6 @@
import org.apache.hadoop.tools.rumen.datatypes.DataType;
import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer;
import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer;
import org.apache.hadoop.util.JacksonUtil;
/**
* Simple wrapper around {@link JsonGenerator} to write objects in JSON format.
@ -40,7 +39,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
private JsonGenerator writer;
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
// define a module
SimpleModule module = new SimpleModule(
@ -54,7 +53,7 @@ public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws I
// register the module with the object-mapper
mapper.registerModule(module);
writer = mapper.createGenerator(output, JsonEncoding.UTF8);
writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8);
if (prettyPrint) {
writer.useDefaultPrettyPrinter();
}

View File

@ -30,6 +30,7 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.Version;
@ -43,7 +44,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.Anonymizer;
import org.apache.hadoop.tools.rumen.datatypes.DataType;
import org.apache.hadoop.util.JacksonUtil;
/**
* A pool of states. States used by {@link DataType}'s can be managed the
@ -206,7 +206,7 @@ private boolean reloadState(Path stateFile, Configuration configuration)
}
private void read(DataInput in) throws IOException {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
// define a module
SimpleModule module = new SimpleModule("State Serializer",
new Version(0, 1, 1, "FINAL", "", ""));
@ -216,7 +216,7 @@ private void read(DataInput in) throws IOException {
// register the module with the object-mapper
mapper.registerModule(module);
JsonParser parser = mapper.createParser((InputStream)in);
JsonParser parser = mapper.getFactory().createParser((InputStream)in);
StatePool statePool = mapper.readValue(parser, StatePool.class);
this.setStates(statePool.getStates());
parser.close();
@ -273,7 +273,7 @@ public void persist() throws IOException {
private void write(DataOutput out) throws IOException {
// This is just a JSON experiment
System.out.println("Dumping the StatePool's in JSON format.");
ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper outMapper = new ObjectMapper();
// define a module
SimpleModule module = new SimpleModule("State Serializer",
new Version(0, 1, 1, "FINAL", "", ""));
@ -283,8 +283,9 @@ private void write(DataOutput out) throws IOException {
// register the module with the object-mapper
outMapper.registerModule(module);
JsonFactory outFactory = outMapper.getFactory();
JsonGenerator jGen =
outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8);
outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8);
jGen.useDefaultPrettyPrinter();
jGen.writeObject(this);

View File

@ -23,6 +23,7 @@
import java.util.List;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -140,8 +141,9 @@ public static void main(String[] args) throws IOException {
Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);
ObjectMapper mapper = new ObjectMapper();
JsonFactory factory = mapper.getFactory();
FSDataOutputStream ostream = lfs.create(goldFilePath, true);
JsonGenerator gen = mapper.createGenerator((OutputStream)ostream,
JsonGenerator gen = factory.createGenerator((OutputStream)ostream,
JsonEncoding.UTF8);
gen.useDefaultPrettyPrinter();

View File

@ -16,13 +16,13 @@
package org.apache.hadoop.yarn.sls;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobTraceReader;
import org.apache.hadoop.tools.rumen.LoggedJob;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ReservationId;
@ -44,8 +44,11 @@
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@ -119,14 +122,15 @@ public void startAM() throws YarnException, IOException {
* Parse workload from a SLS trace file.
*/
private void startAMFromSLSTrace(String inputTrace) throws IOException {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
JsonFactory jsonF = new JsonFactory();
ObjectMapper mapper = new ObjectMapper();
try (Reader input = new InputStreamReader(
new FileInputStream(inputTrace), StandardCharsets.UTF_8)) {
JavaType type = mapper.getTypeFactory().
constructMapType(Map.class, String.class, String.class);
Iterator<Map<String, String>> jobIter = mapper.readValues(
mapper.createParser(input), type);
jsonF.createParser(input), type);
while (jobIter.hasNext()) {
try {

View File

@ -35,6 +35,7 @@
import java.util.TreeMap;
import java.util.TreeSet;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.commons.cli.CommandLine;
@ -43,7 +44,6 @@
import org.apache.commons.cli.Options;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.sls.utils.SLSUtils;
@Private
@ -126,10 +126,10 @@ private static void generateSLSLoadFile(String inputFile, String outputFile)
StandardCharsets.UTF_8)) {
try (Writer output =
new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
Iterator<Map> i = mapper.readValues(
mapper.createParser(input), Map.class);
new JsonFactory().createParser(input), Map.class);
while (i.hasNext()) {
Map m = i.next();
output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
@ -143,7 +143,7 @@ private static void generateSLSNodeFile(String outputFile)
throws IOException {
try (Writer output =
new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
Map rack = new LinkedHashMap();

View File

@ -34,7 +34,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.JobStoryProducer;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
@ -89,8 +88,7 @@ public SynthTraceJobProducer(Configuration conf, Path path)
JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
FileSystem ifs = path.getFileSystem(conf);

View File

@ -34,6 +34,7 @@
import java.util.Map;
import java.util.Set;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
@ -44,7 +45,6 @@
import org.apache.hadoop.tools.rumen.LoggedJob;
import org.apache.hadoop.tools.rumen.LoggedTask;
import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
@ -120,11 +120,12 @@ public static Set<NodeDetails> parseNodesFromRumenTrace(
public static Set<NodeDetails> parseNodesFromSLSTrace(
String jobTrace) throws IOException {
Set<NodeDetails> nodeSet = new HashSet<>();
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
JsonFactory jsonF = new JsonFactory();
ObjectMapper mapper = new ObjectMapper();
Reader input =
new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8);
try {
Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
while (i.hasNext()) {
addNodes(nodeSet, i.next());
}
@ -166,11 +167,12 @@ private static void addNodes(Set<NodeDetails> nodeSet,
public static Set<NodeDetails> parseNodesFromNodeFile(
String nodeFile, Resource nmDefaultResource) throws IOException {
Set<NodeDetails> nodeSet = new HashSet<>();
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
JsonFactory jsonF = new JsonFactory();
ObjectMapper mapper = new ObjectMapper();
Reader input =
new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8);
try {
Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
while (i.hasNext()) {
Map jsonE = i.next();
String rack = "/" + jsonE.get("rack");

View File

@ -18,7 +18,6 @@
package org.apache.hadoop.yarn.sls;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
@ -61,7 +60,7 @@ public void testWorkloadGenerateTime()
JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
SynthTraceJobProducer.Workload wl =
mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@ -182,7 +181,7 @@ public void testSample() throws IOException {
JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
JDKRandomGenerator rand = new JDKRandomGenerator();

View File

@ -28,7 +28,6 @@
import java.util.Properties;
import java.util.Random;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry;
import org.apache.hadoop.yarn.appcatalog.model.Application;
@ -58,18 +57,6 @@ public class AppCatalogSolrClient {
private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class);
private static String urlString;
/**
* It is more performant to reuse ObjectMapper instances but keeping the instance
* private makes it harder for someone to reconfigure it which might have unwanted
* side effects.
*/
private static final ObjectMapper OBJECT_MAPPER;
static {
OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
public AppCatalogSolrClient() {
// Locate Solr URL
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@ -159,6 +146,8 @@ public List<AppStoreEntry> search(String keyword) {
public List<AppEntry> listAppEntries() {
List<AppEntry> list = new ArrayList<AppEntry>();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
SolrClient solr = getSolrClient();
SolrQuery query = new SolrQuery();
@ -175,7 +164,7 @@ public List<AppEntry> listAppEntries() {
entry.setId(d.get("id").toString());
entry.setName(d.get("name_s").toString());
entry.setApp(d.get("app_s").toString());
entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
Service.class));
list.add(entry);
}
@ -187,6 +176,8 @@ public List<AppEntry> listAppEntries() {
public AppStoreEntry findAppStoreEntry(String id) {
AppStoreEntry entry = new AppStoreEntry();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
SolrClient solr = getSolrClient();
SolrQuery query = new SolrQuery();
@ -206,7 +197,7 @@ public AppStoreEntry findAppStoreEntry(String id) {
entry.setDesc(d.get("desc_s").toString());
entry.setLike(Integer.parseInt(d.get("like_i").toString()));
entry.setDownload(Integer.parseInt(d.get("download_i").toString()));
Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(),
Service.class);
String name;
try {
@ -231,6 +222,9 @@ public AppStoreEntry findAppStoreEntry(String id) {
public AppEntry findAppEntry(String id) {
AppEntry entry = new AppEntry();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
SolrClient solr = getSolrClient();
SolrQuery query = new SolrQuery();
query.setQuery("id:" + id);
@ -246,7 +240,7 @@ public AppEntry findAppEntry(String id) {
entry.setId(d.get("id").toString());
entry.setApp(d.get("app_s").toString());
entry.setName(d.get("name_s").toString());
entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
Service.class));
}
} catch (SolrServerException | IOException e) {
@ -258,6 +252,8 @@ public AppEntry findAppEntry(String id) {
public void deployApp(String id, Service service) throws SolrServerException,
IOException {
long download = 0;
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
SolrClient solr = getSolrClient();
// Find application information from AppStore
@ -291,7 +287,7 @@ public void deployApp(String id, Service service) throws SolrServerException,
request.addField("id", name);
request.addField("name_s", name);
request.addField("app_s", entry.getOrg()+"/"+entry.getName());
request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
request.addField("yarnfile_s", mapper.writeValueAsString(service));
docs.add(request);
}
@ -330,6 +326,8 @@ public void deleteApp(String id) {
public void register(Application app) throws IOException {
Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
SolrClient solr = getSolrClient();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
try {
SolrInputDocument buffer = new SolrInputDocument();
buffer.setField("id", java.util.UUID.randomUUID().toString()
@ -345,10 +343,10 @@ public void register(Application app) throws IOException {
buffer.setField("download_i", 0);
// Keep only YARN data model for yarnfile field
String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
LOG.info("app:{}", yarnFile);
Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
String yarnFile = mapper.writeValueAsString(app);
LOG.info("app:"+yarnFile);
Service yarnApp = mapper.readValue(yarnFile, Service.class);
buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
docs.add(buffer);
commitSolrChanges(solr, docs);
@ -361,6 +359,8 @@ public void register(Application app) throws IOException {
protected void register(AppStoreEntry app) throws IOException {
Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
SolrClient solr = getSolrClient();
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
try {
SolrInputDocument buffer = new SolrInputDocument();
buffer.setField("id", java.util.UUID.randomUUID().toString()
@ -376,10 +376,10 @@ protected void register(AppStoreEntry app) throws IOException {
buffer.setField("download_i", app.getDownload());
// Keep only YARN data model for yarnfile field
String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
LOG.info("app:{}", yarnFile);
Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
String yarnFile = mapper.writeValueAsString(app);
LOG.info("app:"+yarnFile);
Service yarnApp = mapper.readValue(yarnFile, Service.class);
buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
docs.add(buffer);
commitSolrChanges(solr, docs);
@ -391,6 +391,8 @@ protected void register(AppStoreEntry app) throws IOException {
public void upgradeApp(Service service) throws IOException,
SolrServerException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
SolrClient solr = getSolrClient();
if (service!=null) {
@ -418,7 +420,7 @@ public void upgradeApp(Service service) throws IOException,
request.addField("id", name);
request.addField("name_s", name);
request.addField("app_s", app);
request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
request.addField("yarnfile_s", mapper.writeValueAsString(service));
docs.add(request);
}
try {

View File

@ -23,7 +23,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
@ -47,19 +46,6 @@
public class YarnServiceClient {
private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class);
/**
* It is more performant to reuse ObjectMapper instances but keeping the instance
* private makes it harder for someone to reconfigure it which might have unwanted
* side effects.
*/
private static final ObjectMapper OBJECT_MAPPER;
static {
OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
private static Configuration conf = new Configuration();
private static ClientConfig getClientConfig() {
ClientConfig config = new DefaultClientConfig();
@ -80,6 +66,8 @@ public YarnServiceClient() {
}
public void createApp(Service app) {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
ClientResponse response;
try {
boolean useKerberos = UserGroupInformation.isSecurityEnabled();
@ -102,7 +90,7 @@ public void createApp(Service app) {
app.setKerberosPrincipal(kerberos);
}
response = asc.getApiClient().post(ClientResponse.class,
OBJECT_MAPPER.writeValueAsString(app));
mapper.writeValueAsString(app));
if (response.getStatus() >= 299) {
String message = response.getEntity(String.class);
throw new RuntimeException("Failed : HTTP error code : "
@ -131,8 +119,10 @@ public void deleteApp(String appInstanceId) {
}
public void restartApp(Service app) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String appInstanceId = app.getName();
String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
String yarnFile = mapper.writeValueAsString(app);
ClientResponse response;
try {
response = asc.getApiClient(asc.getServicePath(appInstanceId))
@ -149,8 +139,10 @@ public void restartApp(Service app) throws JsonProcessingException {
}
public void stopApp(Service app) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String appInstanceId = app.getName();
String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
String yarnFile = mapper.writeValueAsString(app);
ClientResponse response;
try {
response = asc.getApiClient(asc.getServicePath(appInstanceId))
@ -167,12 +159,14 @@ public void stopApp(Service app) throws JsonProcessingException {
}
public void getStatus(AppEntry entry) {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String appInstanceId = entry.getName();
Service app = null;
try {
String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId))
.get(String.class);
app = OBJECT_MAPPER.readValue(yarnFile, Service.class);
app = mapper.readValue(yarnFile, Service.class);
entry.setYarnfile(app);
} catch (UniformInterfaceException | IOException e) {
LOG.error("Error in fetching application status: ", e);
@ -180,9 +174,11 @@ public void getStatus(AppEntry entry) {
}
public void upgradeApp(Service app) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
String appInstanceId = app.getName();
app.setState(ServiceState.EXPRESS_UPGRADING);
String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
String yarnFile = mapper.writeValueAsString(app);
ClientResponse response;
try {
response = asc.getApiClient(asc.getServicePath(appInstanceId))

View File

@ -26,7 +26,6 @@
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@ -876,7 +875,7 @@ public void updateContainerStatus(ContainerStatus status) {
doRegistryUpdate = false;
}
}
final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
try {
Map<String, List<Map<String, String>>> ports = null;
ports = mapper.readValue(status.getExposedPorts(),

View File

@ -30,7 +30,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -62,10 +61,9 @@ public class JsonSerDeser<T> {
@SuppressWarnings("deprecation")
public JsonSerDeser(Class<T> classType) {
this.classType = classType;
this.mapper = JacksonUtil.createBasicObjectMapper();
this.mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
}
public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) {
@ -233,6 +231,7 @@ private void writeJsonAsBytes(T instance,
* @throws JsonProcessingException parse problems
*/
public String toJson(T instance) throws JsonProcessingException {
mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
return mapper.writeValueAsString(instance);
}

View File

@ -23,7 +23,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
import java.io.IOException;
@ -42,18 +41,6 @@
@JsonInclude(value = JsonInclude.Include.NON_NULL)
public class PublishedConfiguration {
/**
* It is more performant to reuse ObjectMapper instances but keeping the instance
* private makes it harder for someone to reconfigure it which might have unwanted
* side effects.
*/
private static final ObjectMapper OBJECT_MAPPER;
static {
OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
}
public String description;
public long updated;
@ -167,7 +154,9 @@ public Properties asProperties() {
* @throws IOException marshalling failure
*/
public String asJson() throws IOException {
String json = OBJECT_MAPPER.writeValueAsString(entries);
ObjectMapper mapper = new ObjectMapper();
mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
String json = mapper.writeValueAsString(entries);
return json;
}

View File

@ -49,7 +49,6 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@ -61,6 +60,7 @@
import org.apache.hadoop.yarn.exceptions.YarnException;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -274,7 +274,7 @@ public void flush() throws IOException {
}
private ObjectMapper createObjectMapper() {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
mapper.setAnnotationIntrospector(
new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@ -365,8 +365,8 @@ public long getLastModifiedTime() {
protected void prepareForWrite() throws IOException{
this.stream = createLogFileStream(fs, logPath);
this.jsonGenerator = JacksonUtil.getSharedWriter()
.createGenerator((OutputStream)stream);
this.jsonGenerator = new JsonFactory().createGenerator(
(OutputStream)stream);
this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
this.lastModifiedTime = Time.monotonicNow();
}

View File

@ -30,7 +30,6 @@
import org.apache.commons.cli.Options;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
import org.apache.hadoop.util.JacksonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@ -63,7 +62,7 @@ public class TimelineClientImpl extends TimelineClient {
private static final Logger LOG =
LoggerFactory.getLogger(TimelineClientImpl.class);
private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
private static final ObjectMapper MAPPER = new ObjectMapper();
private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/";
private static Options opts;

View File

@ -27,9 +27,9 @@
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -96,8 +96,9 @@ public static Credentials readCredentialsFromConfigFile(Path configFile,
}
// Parse the JSON and create the Tokens/Credentials.
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
JsonParser parser = mapper.createParser(contents);
ObjectMapper mapper = new ObjectMapper();
JsonFactory factory = mapper.getFactory();
JsonParser parser = factory.createParser(contents);
JsonNode rootNode = mapper.readTree(parser);
Credentials credentials = new Credentials();
@ -160,7 +161,7 @@ public static boolean writeDockerCredentialsToPath(File outConfigFile,
Credentials credentials) throws IOException {
boolean foundDockerCred = false;
if (credentials.numberOfTokens() > 0) {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
ObjectNode rootNode = mapper.createObjectNode();
ObjectNode registryUrlNode = mapper.createObjectNode();
for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {

View File

@ -31,7 +31,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
@ -54,10 +53,11 @@ public class TimelineUtils {
"TIMELINE_FLOW_RUN_ID_TAG";
public final static String DEFAULT_FLOW_VERSION = "1";
private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
private static ObjectMapper mapper;
static {
YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
mapper = new ObjectMapper();
YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
}
/**
@ -90,9 +90,9 @@ public static String dumpTimelineRecordtoJSON(Object o)
public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
throws JsonGenerationException, JsonMappingException, IOException {
if (pretty) {
return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o);
return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
} else {
return OBJECT_MAPPER.writeValueAsString(o);
return mapper.writeValueAsString(o);
}
}

View File

@ -28,8 +28,8 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.webapp.view.DefaultPage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -42,6 +42,7 @@
@InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
public abstract class Controller implements Params {
public static final Logger LOG = LoggerFactory.getLogger(Controller.class);
static final ObjectMapper jsonMapper = new ObjectMapper();
@RequestScoped
public static class RequestContext{
@ -224,7 +225,7 @@ protected void renderJSON(Object object) {
context().rendered = true;
context().response.setContentType(MimeType.JSON);
try {
JacksonUtil.getSharedWriter().writeValue(writer(), object);
jsonMapper.writeValue(writer(), object);
} catch (Exception e) {
throw new WebAppException(e);
}

View File

@ -19,11 +19,11 @@
import java.io.IOException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.JacksonUtil;
/**
* A utility class providing methods for serializing and deserializing
@ -38,8 +38,14 @@
public class GenericObjectMapper {
private static final byte[] EMPTY_BYTES = new byte[0];
public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class);
public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter();
public static final ObjectReader OBJECT_READER;
public static final ObjectWriter OBJECT_WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
OBJECT_READER = mapper.reader(Object.class);
OBJECT_WRITER = mapper.writer();
}
/**
* Serializes an Object into a byte array. Along with {@link #read(byte[])},

View File

@ -43,7 +43,6 @@
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord;
@ -136,7 +135,7 @@ public class AuxServices extends AbstractService
this.dirsHandler = nmContext.getLocalDirsHandler();
this.delService = deletionService;
this.userUGI = getRemoteUgi();
this.mapper = JacksonUtil.createBasicObjectMapper();
this.mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
// Obtain services from configuration in init()
}

View File

@ -28,11 +28,11 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@ -58,8 +58,9 @@ public void initialize(Configuration conf) {
+ " we have to set the configuration:" +
YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH);
}
ObjectMapper mapper = new ObjectMapper();
try {
networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile),
networkTagMapping = mapper.readValue(new File(mappingJsonFile),
NetworkTagMapping.class);
} catch (Exception e) {
throw new YarnRuntimeException(e);

View File

@ -27,7 +27,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@ -92,7 +91,6 @@
import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
/**
* <p>This class is an extension of {@link OCIContainerRuntime} that uses the
* native {@code container-executor} binary via a
@ -208,7 +206,7 @@ public void initialize(Configuration configuration, Context nmCtx)
imageTagToManifestPlugin.init(conf);
manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
manifestToResourcesPlugin.init(conf);
mapper = JacksonUtil.createBasicObjectMapper();
mapper = new ObjectMapper();
defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);
allowedNetworks.clear();

View File

@ -26,7 +26,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
import java.io.BufferedReader;
@ -43,6 +42,7 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -65,6 +65,7 @@ public class ImageTagToManifestPlugin extends AbstractService
implements RuncImageTagToManifestPlugin {
private Map<String, ImageManifest> manifestCache;
private ObjectMapper objMapper;
private AtomicReference<Map<String, String>> localImageToHashCache =
new AtomicReference<>(new HashMap<>());
private AtomicReference<Map<String, String>> hdfsImageToHashCache =
@ -106,7 +107,7 @@ public ImageManifest getManifestFromImageTag(String imageTag)
}
byte[] bytes = IOUtils.toByteArray(input);
manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class);
manifest = objMapper.readValue(bytes, ImageManifest.class);
manifestCache.put(hash, manifest);
return manifest;
@ -278,6 +279,7 @@ protected void serviceInit(Configuration configuration) throws Exception {
DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
DEFAULT_NUM_MANIFESTS_TO_CACHE);
this.objMapper = new ObjectMapper();
this.manifestCache = Collections.synchronizedMap(
new LRUCache(numManifestsToCache, 0.75f));
@ -313,7 +315,7 @@ protected void serviceStop() throws Exception {
}
private static class LRUCache extends LinkedHashMap<String, ImageManifest> {
private final int cacheSize;
private int cacheSize;
LRUCache(int initialCapacity, float loadFactor) {
super(initialCapacity, loadFactor, true);

View File

@ -20,10 +20,10 @@
import org.apache.hadoop.classification.VisibleForTesting;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -105,7 +105,8 @@ private void loadProfiles() throws IOException {
resourcesFile = tmp.getPath();
}
}
Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class);
ObjectMapper mapper = new ObjectMapper();
Map data = mapper.readValue(new File(resourcesFile), Map.class);
Iterator iterator = data.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry entry = (Map.Entry) iterator.next();

View File

@ -27,7 +27,6 @@
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule;
import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction;
import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions;
@ -44,6 +43,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.VisibleForTesting;
public class MappingRuleCreator {
@ -58,12 +58,14 @@ public MappingRulesDescription getMappingRulesFromJsonFile(String filePath)
MappingRulesDescription getMappingRulesFromJson(byte[] contents)
throws IOException {
return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
ObjectMapper objectMapper = new ObjectMapper();
return objectMapper.readValue(contents, MappingRulesDescription.class);
}
MappingRulesDescription getMappingRulesFromJson(String contents)
throws IOException {
return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
ObjectMapper objectMapper = new ObjectMapper();
return objectMapper.readValue(contents, MappingRulesDescription.class);
}
public List<MappingRule> getMappingRulesFromFile(String jsonPath)

View File

@ -21,7 +21,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath;
@ -53,11 +52,9 @@ public class LegacyMappingRuleToJson {
public static final String JSON_NODE_MATCHES = "matches";
/**
* It is more performant to reuse ObjectMapper instances but keeping the instance
* private makes it harder for someone to reconfigure it which might have unwanted
* side effects.
* Our internal object mapper, used to create JSON nodes.
*/
private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
private ObjectMapper objectMapper = new ObjectMapper();
/**
* Collection to store the legacy group mapping rule strings.
@ -141,8 +138,8 @@ public LegacyMappingRuleToJson setAppNameMappingRules(
*/
public String convert() {
//creating the basic JSON config structure
ObjectNode rootNode = OBJECT_MAPPER.createObjectNode();
ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode();
ObjectNode rootNode = objectMapper.createObjectNode();
ArrayNode rulesNode = objectMapper.createArrayNode();
rootNode.set("rules", rulesNode);
//Processing and adding all the user group mapping rules
@ -161,7 +158,7 @@ public String convert() {
}
try {
return OBJECT_MAPPER
return objectMapper
.writerWithDefaultPrettyPrinter()
.writeValueAsString(rootNode);
} catch (JsonProcessingException e) {
@ -249,7 +246,7 @@ private String[] splitRule(String rule, int expectedParts) {
* @return The object node with the preset fields
*/
private ObjectNode createDefaultRuleNode(String type) {
return OBJECT_MAPPER
return objectMapper
.createObjectNode()
.put("type", type)
//All legacy rule fallback to place to default

View File

@ -32,7 +32,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -56,6 +55,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.hadoop.classification.VisibleForTesting;
@ -327,14 +327,14 @@ private void performRuleConversion(FairScheduler fs)
placementConverter.convertPlacementPolicy(placementManager,
ruleHandler, capacitySchedulerConfig, usePercentages);
final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
// close output stream if we write to a file, leave it open otherwise
if (!consoleMode && rulesToFile) {
mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true);
} else {
mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
}
ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());
if (consoleMode && rulesToFile) {
System.out.println("======= " + MAPPING_RULES_JSON + " =======");

View File

@ -42,7 +42,6 @@
import org.apache.hadoop.service.ServiceOperations;
import org.apache.hadoop.ipc.CallerContext;
import org.apache.hadoop.util.ApplicationClassLoader;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@ -321,7 +320,7 @@ protected void serviceStart() throws Exception {
}
}
objMapper = JacksonUtil.createBasicObjectMapper();
objMapper = new ObjectMapper();
objMapper.setAnnotationIntrospector(
new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
jsonFactory = new MappingJsonFactory(objMapper);

View File

@ -18,13 +18,13 @@
package org.apache.hadoop.yarn.server.timeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
@ -298,6 +298,7 @@ public void close() throws IOException {
}
};
}
static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@SuppressWarnings("unchecked")
private V getEntityForKey(byte[] key) throws IOException {
@ -305,7 +306,7 @@ private V getEntityForKey(byte[] key) throws IOException {
if (resultRaw == null) {
return null;
}
return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class);
return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class);
}
private byte[] getStartTimeKey(K entityId) {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.timeline;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -30,7 +31,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@ -108,7 +108,7 @@ static FSDataOutputStream createLogFile(Path logPath, FileSystem fs)
}
static ObjectMapper createObjectMapper() {
ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
ObjectMapper mapper = new ObjectMapper();
mapper.setAnnotationIntrospector(
new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@ -230,9 +230,10 @@ static TimelineEvent createEvent(long timestamp, String type, Map<String,
static void writeEntities(TimelineEntities entities, Path logPath,
FileSystem fs) throws IOException {
FSDataOutputStream outStream = createLogFile(logPath, fs);
ObjectMapper objMapper = createObjectMapper();
JsonGenerator jsonGenerator = objMapper.createGenerator((OutputStream)outStream);
JsonGenerator jsonGenerator
= new JsonFactory().createGenerator((OutputStream)outStream);
jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
ObjectMapper objMapper = createObjectMapper();
for (TimelineEntity entity : entities.getEntities()) {
objMapper.writeValue(jsonGenerator, entity);
}

View File

@ -23,7 +23,6 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.util.JacksonUtil;
/**
* A simple util class for Json SerDe.
@ -32,7 +31,7 @@ public final class JsonUtils {
private JsonUtils(){}
private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
static {
OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

View File

@ -46,7 +46,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.JacksonUtil;
import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@ -105,10 +104,11 @@ String getRootPath() {
return rootPath.toString();
}
private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
private static ObjectMapper mapper;
static {
YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
mapper = new ObjectMapper();
YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
}
/**
@ -127,7 +127,7 @@ String getRootPath() {
public static <T> T getTimelineRecordFromJSON(
String jsonString, Class<T> clazz)
throws JsonGenerationException, JsonMappingException, IOException {
return OBJECT_MAPPER.readValue(jsonString, clazz);
return mapper.readValue(jsonString, clazz);
}
private static void fillFields(TimelineEntity finalEntity,