MAPREDUCE-6790. Update jackson from 1.9.13 to 2.x in hadoop-mapreduce.

commit ca33bdd5c6
parent 049e7d27be
Author: Akira Ajisaka
Date:   2016-11-07 11:19:21 +09:00
8 changed files with 30 additions and 25 deletions

View File

@@ -85,6 +85,10 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
   <build>

View File

@@ -76,9 +76,9 @@
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.sun.jersey.api.client.ClientHandlerException;
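The import swap above is the pattern repeated across the whole patch: each org.codehaus.jackson class has a one-to-one counterpart under com.fasterxml.jackson. As a standalone illustration (not code from this file, sample values invented), the Jackson 2 ObjectMapper and JsonNodeFactory are used like this:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class JacksonTwoTreeExample {
  public static void main(String[] args) throws Exception {
    // JsonNodeFactory builds tree-model nodes exactly as in Jackson 1.x;
    // only the package moved to com.fasterxml.jackson.databind.node.
    ObjectNode entity = JsonNodeFactory.instance.objectNode();
    entity.put("entity", "job_1478475000000_0001");   // sample values only
    entity.put("entitytype", "MAPREDUCE_JOB");

    // ObjectMapper serializes the tree; writeValueAsString keeps the same
    // name across both major versions, so call sites compile unchanged.
    System.out.println(new ObjectMapper().writeValueAsString(entity));
  }
}

Because the class and method names are unchanged here, rewriting the imports is usually the entire migration for tree-building code like this.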

View File

@@ -73,8 +73,6 @@
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
 import org.apache.hadoop.yarn.server.timeline.TimelineStore;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -82,6 +80,9 @@
 import org.junit.Test;
 import org.mockito.Mockito;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 public class TestJobHistoryEventHandler {

View File

@@ -53,6 +53,10 @@
       <artifactId>jsonassert</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
   </dependencies>
   <build>

View File

@@ -18,6 +18,9 @@
 package org.apache.hadoop.mapred;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,9 +31,6 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.StringUtils;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonGenerator;
 import java.io.BufferedInputStream;
 import java.io.InputStream;
@@ -532,7 +532,7 @@ static void dumpConfiguration(Writer out, String configFile,
     }
     JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
     QueueConfigurationParser parser;
     boolean aclsEnabled = false;
     if (conf != null) {
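The dumpConfiguration hunk shows the one spot in this patch where more than the package changes: Jackson 2 renamed JsonFactory#createJsonGenerator(Writer) to createGenerator(Writer) (the 1.x name remains in 2.x only as a deprecated alias). A minimal, self-contained sketch of the new call, with an invented field name:

import java.io.StringWriter;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class CreateGeneratorExample {
  public static void main(String[] args) throws Exception {
    StringWriter out = new StringWriter();
    JsonFactory dumpFactory = new JsonFactory();
    // Jackson 1.x: dumpFactory.createJsonGenerator(out);
    // Jackson 2.x renames the factory method to createGenerator.
    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
    dumpGenerator.writeStartObject();
    dumpGenerator.writeStringField("queue_name", "default");  // invented field
    dumpGenerator.writeEndObject();
    dumpGenerator.close();
    System.out.println(out);  // {"queue_name":"default"}
  }
}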

View File

@@ -32,6 +32,10 @@
 import javax.crypto.KeyGenerator;
 import javax.crypto.SecretKey;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -60,10 +64,6 @@
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ReservationId;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
 import com.google.common.base.Charsets;
@@ -407,7 +407,6 @@ private void readTokensFromFiles(Configuration conf, Credentials credentials)
       LOG.info("loading user's secret keys from " + tokensFileName);
       String localFileName = new Path(tokensFileName).toUri().getPath();
-      boolean json_error = false;
       try {
         // read JSON
         Map<String, String> nm = READER.readValue(new File(localFileName));
@@ -416,15 +415,11 @@ private void readTokensFromFiles(Configuration conf, Credentials credentials)
           credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
               .getBytes(Charsets.UTF_8));
         }
-      } catch (JsonMappingException e) {
-        json_error = true;
-      } catch (JsonParseException e) {
-        json_error = true;
-      }
-      if(json_error)
+      } catch (JsonMappingException | JsonParseException e) {
         LOG.warn("couldn't parse Token Cache JSON file with user secret keys");
+      }
     }
   }
   //get secret keys and tokens and store them into TokenCache
   private void populateTokenCache(Configuration conf, Credentials credentials)
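The readTokensFromFiles hunks above do two things: they move JsonParseException and JsonMappingException to their Jackson 2 packages, and they collapse the json_error flag plus the two separate catch blocks into a Java 7 multi-catch. A rough standalone sketch of the same read-and-catch shape (the file path and the READER construction are assumptions for illustration, not taken from this patch):

import java.io.File;
import java.io.IOException;
import java.util.Map;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class TokenFileReadSketch {
  // An ObjectReader is immutable and reusable, which is presumably why the
  // submitter keeps one in a static READER field; readerFor() is the
  // Jackson 2 way to bind it to a target type (assumed here, not shown above).
  private static final ObjectReader READER =
      new ObjectMapper().readerFor(Map.class);

  public static void main(String[] args) {
    try {
      // Placeholder path; the real code resolves it from job configuration.
      Map<String, String> nm = READER.readValue(new File("/tmp/secrets.json"));
      for (Map.Entry<String, String> ent : nm.entrySet()) {
        System.out.println(ent.getKey() + " = " + ent.getValue());
      }
    } catch (JsonMappingException | JsonParseException e) {
      // Java 7 multi-catch replaces the old json_error flag + if block.
      System.err.println("couldn't parse Token Cache JSON file with user secret keys");
    } catch (IOException e) {
      System.err.println("couldn't read token file: " + e);
    }
  }
}

Both Jackson exception types extend IOException, so the multi-catch has to come before any broader IOException handler, as in the sketch.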

View File

@@ -20,14 +20,15 @@
 import java.util.HashSet;
 import java.util.Set;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ArrayNode;
-import org.codehaus.jackson.node.ObjectNode;
 /**
  * Class containing utility methods to be used by JobHistoryEventHandler.
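The utility class above turns Counters into a JSON tree for the JobHistoryEventHandler, so it needs the tree-model node types (ObjectNode, ArrayNode) from their new package. A hedged sketch of that kind of tree construction; the field names and values are made up for illustration and are not the utility's exact output:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class CountersToJsonSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    // One object per counter group, each carrying an array of counters.
    ArrayNode groups = mapper.createArrayNode();
    ObjectNode group = groups.addObject();
    group.put("NAME", "org.apache.hadoop.mapreduce.TaskCounter");

    ArrayNode counters = group.putArray("COUNTERS");
    ObjectNode counter = counters.addObject();
    counter.put("NAME", "MAP_INPUT_RECORDS");
    counter.put("VALUE", 42L);

    System.out.println(
        mapper.writerWithDefaultPrettyPrinter().writeValueAsString(groups));
  }
}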

View File

@@ -24,6 +24,8 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -36,8 +38,6 @@
 import org.apache.hadoop.yarn.server.resourcemanager.RMNMInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
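On the test side the change is mechanical: assertions keep walking a JsonNode tree, only the tree is now produced by the Jackson 2 ObjectMapper. A small self-contained sketch of that read path (the JSON body and field names are invented, not from the test):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeAssertSketch {
  public static void main(String[] args) throws Exception {
    // Invented response body standing in for live cluster info.
    String body = "{\"nodes\":[{\"HostName\":\"host1\",\"NumContainers\":3}]}";

    // readTree gives the same tree-model entry point as Jackson 1.x, so
    // assertions built on path()/get()/asText()/asInt() port over unchanged.
    JsonNode root = new ObjectMapper().readTree(body);
    JsonNode first = root.path("nodes").get(0);
    System.out.println(first.path("HostName").asText());      // host1
    System.out.println(first.path("NumContainers").asInt());  // 3
  }
}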