YARN-5713. Update jackson from 1.9.13 to 2.x in hadoop-yarn.

Akira Ajisaka 2016-11-22 10:20:31 +09:00
parent 683e0c71fe
commit 6f8074298d
22 changed files with 115 additions and 133 deletions

View File

@@ -865,6 +865,16 @@
       <artifactId>jackson-annotations</artifactId>
       <version>${jackson2.version}</version>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.module</groupId>
+      <artifactId>jackson-module-jaxb-annotations</artifactId>
+      <version>${jackson2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.jaxrs</groupId>
+      <artifactId>jackson-jaxrs-json-provider</artifactId>
+      <version>${jackson2.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>

View File

@@ -81,6 +81,10 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
+    </dependency>
   </dependencies>
   <build>

View File

@@ -32,7 +32,8 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.yarn.util.TimelineServiceHelper;
-import org.codehaus.jackson.annotate.JsonSetter;
+import com.fasterxml.jackson.annotation.JsonSetter;
 /**
  * The basic timeline entity data structure for timeline service v2. Timeline

View File

@@ -86,22 +86,6 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-client</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-jaxrs</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-xc</artifactId>
-    </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -184,6 +168,22 @@
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.module</groupId>
+      <artifactId>jackson-module-jaxb-annotations</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.jaxrs</groupId>
+      <artifactId>jackson-jaxrs-json-provider</artifactId>
+    </dependency>
   </dependencies>
   <build>

View File

@@ -57,14 +57,14 @@
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig.Feature;
-import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
-import org.codehaus.jackson.util.MinimalPrettyPrinter;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
 import com.sun.jersey.api.client.Client;
 /**
@@ -273,9 +273,9 @@ public void flush() throws IOException {
   private ObjectMapper createObjectMapper() {
     ObjectMapper mapper = new ObjectMapper();
-    mapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector());
-    mapper.setSerializationInclusion(Inclusion.NON_NULL);
-    mapper.configure(Feature.CLOSE_CLOSEABLE, false);
+    mapper.setAnnotationIntrospector(
+        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
+    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
     return mapper;
   }
@@ -366,7 +366,7 @@ public long getLastModifiedTime() {
   protected void prepareForWrite() throws IOException{
     this.stream = createLogFileStream(fs, logPath);
-    this.jsonGenerator = new JsonFactory().createJsonGenerator(stream);
+    this.jsonGenerator = new JsonFactory().createGenerator(stream);
     this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     this.jsonGenerator.configure(
         JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);
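For reference, a minimal self-contained sketch of the Jackson 1 to 2 pattern this file adopts (everything outside the Jackson API here is illustrative): the JAXB introspector now comes from the jackson-module-jaxb-annotations module and takes a TypeFactory, Inclusion.NON_NULL becomes JsonInclude.Include.NON_NULL, createJsonGenerator() becomes createGenerator(), and the Jackson 1 CLOSE_CLOSEABLE override can be dropped because false is already the Jackson 2 default.

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;

public class Jackson2WriterSketch {
  public static void main(String[] args) throws IOException {
    // Jackson 2: the JAXB introspector requires a TypeFactory argument.
    ObjectMapper mapper = new ObjectMapper();
    mapper.setAnnotationIntrospector(
        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
    // Jackson 1 Inclusion.NON_NULL -> Jackson 2 JsonInclude.Include.NON_NULL.
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    // No CLOSE_CLOSEABLE override needed: it defaults to false in 2.x.

    // Jackson 1 createJsonGenerator(...) -> Jackson 2 createGenerator(...).
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator generator = new JsonFactory().createGenerator(out);
    generator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    generator.configure(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);

    mapper.writeValue(generator, Collections.singletonMap("entity", "value"));
    generator.close();
    System.out.println(out.toString("UTF-8"));
  }
}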

View File

@@ -45,6 +45,7 @@
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -77,7 +78,6 @@
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;

View File

@@ -21,6 +21,9 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
@@ -32,9 +35,6 @@
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.YarnVersionInfo;
 import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 /**
  * The helper class for the timeline module.
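A side note on this swap: in Jackson 2 these exception types split across artifacts (JsonGenerationException in jackson-core, JsonMappingException in jackson-databind), but both still extend JsonProcessingException, which in turn extends IOException, so Jackson 1 style signatures keep compiling. A hedged sketch (class and method names are illustrative, not the Hadoop ones):

import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class TimelineDumpSketch {
  // The throws clause mirrors the Jackson 1 style; it remains legal because
  // both exception types still exist in Jackson 2 and writeValueAsString's
  // JsonProcessingException is a subclass of IOException.
  public static String dumpRecordToJson(Object record)
      throws JsonGenerationException, JsonMappingException, IOException {
    return new ObjectMapper().writeValueAsString(record);
  }
}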

View File

@@ -28,9 +28,9 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.yarn.webapp.view.DefaultPage;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -21,13 +21,14 @@
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.ext.Provider;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.AnnotationIntrospector;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 import com.google.inject.Singleton;
@@ -54,9 +55,10 @@ public ObjectMapper locateMapper(Class<?> type, MediaType mediaType) {
   }
   public static void configObjectMapper(ObjectMapper mapper) {
-    AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
+    AnnotationIntrospector introspector =
+        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance());
     mapper.setAnnotationIntrospector(introspector);
-    mapper.setSerializationInclusion(Inclusion.NON_NULL);
+    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
   }
 }
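The same pattern as a standalone hedged sketch (the class name is illustrative): a Jackson 2 JAX-RS provider subclasses com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider instead of the old org.codehaus.jackson.jaxrs class, and the mapper configuration moves to the two-argument introspector constructor and the JsonInclude constants, exactly as the hunk above does.

import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.Provider;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.AnnotationIntrospector;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;

@Provider
public class JaxbJsonProviderSketch extends JacksonJaxbJsonProvider {
  @Override
  public ObjectMapper locateMapper(Class<?> type, MediaType mediaType) {
    ObjectMapper mapper = super.locateMapper(type, mediaType);
    // Same two-step configuration the hunk above introduces.
    AnnotationIntrospector introspector =
        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance());
    mapper.setAnnotationIntrospector(introspector);
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    return mapper;
  }
}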

View File

@@ -63,18 +63,6 @@
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>

View File

@@ -18,6 +18,12 @@
 package org.apache.hadoop.registry.client.binding;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -30,13 +36,6 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
 import org.apache.hadoop.registry.client.exceptions.NoRecordException;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,8 +76,7 @@ public JsonSerDeser(Class<T> classType) {
     Preconditions.checkArgument(classType != null, "null classType");
     this.classType = classType;
     this.mapper = new ObjectMapper();
-    mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
-        false);
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
   }
   /**
@@ -94,7 +92,8 @@ public String getName() {
    *
    * @param json input
    * @return the parsed JSON
-   * @throws IOException IO
+   * @throws IOException IO problems
+   * @throws JsonParseException If the input is not well-formatted
    * @throws JsonMappingException failure to map from the JSON to this class
    */
   @SuppressWarnings("unchecked")
@@ -113,6 +112,7 @@ public synchronized T fromJson(String json)
    * @param jsonFile input file
    * @return the parsed JSON
    * @throws IOException IO problems
+   * @throws JsonParseException If the input is not well-formatted
    * @throws JsonMappingException failure to map from the JSON to this class
    */
   @SuppressWarnings("unchecked")
@@ -131,6 +131,7 @@ public synchronized T fromFile(File jsonFile)
    * @param resource input file
    * @return the parsed JSON
    * @throws IOException IO problems
+   * @throws JsonParseException If the input is not well-formatted
    * @throws JsonMappingException failure to map from the JSON to this class
    */
   @SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"})
@@ -281,13 +282,10 @@ public T fromBytes(String path, byte[] bytes, String marker)
    * Convert an instance to a JSON string
    * @param instance instance to convert
    * @return a JSON string description
-   * @throws JsonParseException parse problems
-   * @throws JsonMappingException O/J mapping problems
+   * @throws JsonProcessingException Json generation problems
    */
-  public synchronized String toJson(T instance) throws IOException,
-      JsonGenerationException,
-      JsonMappingException {
-    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
+  public synchronized String toJson(T instance) throws JsonProcessingException {
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
     return mapper.writeValueAsString(instance);
   }
@@ -302,7 +300,7 @@ public String toString(T instance) {
     Preconditions.checkArgument(instance != null, "Null instance argument");
     try {
       return toJson(instance);
-    } catch (IOException e) {
+    } catch (JsonProcessingException e) {
       return "Failed to convert to a string: " + e;
     }
   }
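A condensed, hedged sketch of the feature renames this file picks up (SerDeSketch is illustrative, not the Hadoop class): DeserializationConfig.Feature and SerializationConfig.Feature become DeserializationFeature and SerializationFeature, and because Jackson 2's writeValueAsString() declares only JsonProcessingException, toJson() can drop IOException and the Jackson 1 exception types from its throws clause, which is what lets toString() above catch JsonProcessingException instead of IOException.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

import java.io.IOException;

public class SerDeSketch {
  private final ObjectMapper mapper = new ObjectMapper();

  public SerDeSketch() {
    // Jackson 1 DeserializationConfig.Feature.* -> DeserializationFeature.*
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
  }

  public String toJson(Object instance) throws JsonProcessingException {
    // Jackson 1 SerializationConfig.Feature.* -> SerializationFeature.*;
    // writeValueAsString declares only JsonProcessingException in Jackson 2.
    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
    return mapper.writeValueAsString(instance);
  }

  public <T> T fromJson(String json, Class<T> clazz) throws IOException {
    // readValue still declares IOException (with JsonParseException and
    // JsonMappingException as its Jackson 2 subtypes).
    return mapper.readValue(json, clazz);
  }
}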

View File

@@ -18,13 +18,13 @@
 package org.apache.hadoop.registry.client.types;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.registry.client.binding.JsonSerDeser;
 import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
 import java.net.URI;
 import java.util.ArrayList;

View File

@@ -19,10 +19,10 @@
 package org.apache.hadoop.registry.client.types;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.annotate.JsonProperty;
 /**
  * Output of a <code>RegistryOperations.stat()</code> call
View File

@@ -18,13 +18,12 @@
 package org.apache.hadoop.registry.client.types;
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
-import org.codehaus.jackson.annotate.JsonAnyGetter;
-import org.codehaus.jackson.annotate.JsonAnySetter;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
 import java.util.ArrayList;
 import java.util.HashMap;

View File

@@ -19,11 +19,11 @@
 import java.io.IOException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
-import org.codehaus.jackson.map.ObjectWriter;
 /**
  * A utility class providing methods for serializing and deserializing

View File

@@ -100,18 +100,6 @@
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-xc</artifactId>
-    </dependency>
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>

View File

@@ -17,6 +17,11 @@
 package org.apache.hadoop.yarn.server.timeline;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.MappingJsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -49,10 +54,6 @@
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.timeline.TimelineDataManager.CheckAcl;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.MappingJsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -293,7 +294,8 @@ protected void serviceStart() throws Exception {
     }
     objMapper = new ObjectMapper();
-    objMapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector());
+    objMapper.setAnnotationIntrospector(
+        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     jsonFactory = new MappingJsonFactory(objMapper);
     final long scanIntervalSecs = conf.getLong(
         YarnConfiguration

View File

@@ -16,6 +16,11 @@
  */
 package org.apache.hadoop.yarn.server.timeline;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.MappingIterator;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -31,11 +36,6 @@
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.MappingIterator;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -140,7 +140,7 @@ private long parsePath(TimelineDataManager tdm, Path logPath,
     try {
       in.seek(offset);
       try {
-        parser = jsonFactory.createJsonParser(in);
+        parser = jsonFactory.createParser(in);
         parser.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
       } catch (IOException e) {
         // if app hasn't completed then there may be errors due to the
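For the reader side, a minimal hedged sketch of the same migration (EntityLogReaderSketch is illustrative, and a plain Object stands in for the TimelineEntity type the real code maps): createJsonParser() becomes createParser(), MappingIterator moves to the databind package, and AUTO_CLOSE_SOURCE still keeps the underlying stream open across incremental reads.

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.io.InputStream;

public class EntityLogReaderSketch {
  private final ObjectMapper mapper = new ObjectMapper();

  public long readEntities(InputStream in) throws IOException {
    // Jackson 1 createJsonParser(...) -> Jackson 2 createParser(...).
    JsonParser parser = new MappingJsonFactory(mapper).createParser(in);
    // Keep the underlying stream open so the caller can resume at an offset.
    parser.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
    long count = 0;
    MappingIterator<Object> it = mapper.readValues(parser, Object.class);
    while (it.hasNext()) {
      it.next();
      count++;
    }
    return count;
  }
}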

View File

@@ -17,6 +17,13 @@
  */
 package org.apache.hadoop.yarn.server.timeline;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.type.TypeFactory;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -30,13 +37,6 @@
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.SerializationConfig;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-import org.codehaus.jackson.util.MinimalPrettyPrinter;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -108,9 +108,9 @@ static FSDataOutputStream createLogFile(Path logPath, FileSystem fs)
   static ObjectMapper createObjectMapper() {
     ObjectMapper mapper = new ObjectMapper();
-    mapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector());
-    mapper.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
-    mapper.configure(SerializationConfig.Feature.CLOSE_CLOSEABLE, false);
+    mapper.setAnnotationIntrospector(
+        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
+    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
     return mapper;
   }
@@ -232,7 +232,7 @@ static void writeEntities(TimelineEntities entities, Path logPath,
       FileSystem fs) throws IOException {
     FSDataOutputStream outStream = createLogFile(logPath, fs);
     JsonGenerator jsonGenerator
-        = (new JsonFactory()).createJsonGenerator(outStream);
+        = new JsonFactory().createGenerator(outStream);
     jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     ObjectMapper objMapper = createObjectMapper();
     for (TimelineEntity entity : entities.getEntities()) {

View File

@@ -16,6 +16,10 @@
  */
 package org.apache.hadoop.yarn.server.timeline;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileContext;
@@ -30,10 +34,6 @@
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.util.MinimalPrettyPrinter;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -232,7 +232,7 @@ private void writeEntitiesLeaveOpen(TimelineEntities entities, Path logPath)
       throws IOException {
     if (outStream == null) {
       outStream = PluginStoreTestUtils.createLogFile(logPath, fs);
-      jsonGenerator = (new JsonFactory()).createJsonGenerator(outStream);
+      jsonGenerator = new JsonFactory().createGenerator(outStream);
       jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     }
     for (TimelineEntity entity : entities.getEntities()) {
@@ -248,7 +248,7 @@ private void writeDomainLeaveOpen(TimelineDomain domain, Path logPath)
     }
     // Write domain uses its own json generator to isolate from entity writers
     JsonGenerator jsonGeneratorLocal
-        = (new JsonFactory()).createJsonGenerator(outStreamDomain);
+        = new JsonFactory().createGenerator(outStreamDomain);
     jsonGeneratorLocal.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     objMapper.writeValue(jsonGeneratorLocal, domain);
     outStreamDomain.hflush();

View File

@@ -121,16 +121,6 @@
       <artifactId>commons-csv</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>

View File

@@ -33,6 +33,9 @@
 import java.util.Set;
 import java.util.TreeMap;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVParser;
 import org.apache.commons.csv.CSVRecord;
@@ -49,9 +52,6 @@
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
 import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;