diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
index f46a987bca..592a89facf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.htrace.fasterxml.jackson.annotation.JsonInclude;
 
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 
@@ -34,6 +35,10 @@
 @InterfaceStability.Unstable
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class DiskBalancerWorkItem {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(DiskBalancerWorkItem.class);
+
   private long startTime;
   private long secondsElapsed;
   private long bytesToCopy;
@@ -74,8 +79,7 @@ public DiskBalancerWorkItem(long bytesToCopy, long bytesCopied) {
    */
   public static DiskBalancerWorkItem parseJson(String json) throws IOException {
     Preconditions.checkNotNull(json);
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.readValue(json, DiskBalancerWorkItem.class);
+    return READER.readValue(json);
   }
 
   /**
@@ -169,8 +173,7 @@ public void incBlocksCopied() {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.writeValueAsString(this);
+    return MAPPER.writeValueAsString(this);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
index 14789b687f..94bf6a6509 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 import org.codehaus.jackson.map.SerializationConfig;
 
 import static org.codehaus.jackson.map.type.TypeFactory.defaultInstance;
@@ -38,6 +39,15 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskBalancerWorkStatus {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
+      new ObjectMapper().enable(
+          SerializationConfig.Feature.INDENT_OUTPUT);
+  private static final ObjectReader READER_WORKSTATUS =
+      new ObjectMapper().reader(DiskBalancerWorkStatus.class);
+  private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
+      .reader(defaultInstance().constructCollectionType(List.class,
+          DiskBalancerWorkEntry.class));
 
   private final List<DiskBalancerWorkEntry> currentState;
   private Result result;
@@ -92,10 +102,7 @@ public DiskBalancerWorkStatus(Result result, String planID, String planFile,
     this.result = result;
     this.planID = planID;
     this.planFile = planFile;
-    ObjectMapper mapper = new ObjectMapper();
-    this.currentState = mapper.readValue(currentState,
-        defaultInstance().constructCollectionType(
-            List.class, DiskBalancerWorkEntry.class));
+    this.currentState = READER_WORKENTRY.readValue(currentState);
   }
 
@@ -141,15 +148,11 @@ public List<DiskBalancerWorkEntry> getCurrentState() {
    * @throws IOException
    **/
   public String currentStateString() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.enable(SerializationConfig.Feature.INDENT_OUTPUT);
-    return mapper.writeValueAsString(currentState);
+    return MAPPER_WITH_INDENT_OUTPUT.writeValueAsString(currentState);
   }
 
   public String toJsonString() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.writeValueAsString(this);
-
+    return MAPPER.writeValueAsString(this);
   }
 
   /**
@@ -160,8 +163,7 @@ public String toJsonString() throws IOException {
    */
   public static DiskBalancerWorkStatus parseJson(String json) throws
       IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.readValue(json, DiskBalancerWorkStatus.class);
+    return READER_WORKSTATUS.readValue(json);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
index 523c0a674d..ec72d97d5c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Step;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.util.Time;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -263,8 +263,7 @@ public String getVolumeNames() throws DiskBalancerException {
       for (Map.Entry entry : volMap.entrySet()) {
         pathMap.put(entry.getKey(), entry.getValue().getBasePath());
       }
-      ObjectMapper mapper = new ObjectMapper();
-      return mapper.writeValueAsString(pathMap);
+      return JsonUtil.toJsonString(pathMap);
     } catch (DiskBalancerException ex) {
       throw ex;
     } catch (IOException e) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
index 3110c1a8ec..a1c15aed68 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -69,6 +70,8 @@
  * Common interface for command handling.
  */
 public abstract class Command extends Configured {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(HashMap.class);
   static final Logger LOG = LoggerFactory.getLogger(Command.class);
   private Map validArgs = new HashMap<>();
   private URI clusterURI;
@@ -441,11 +444,10 @@ protected void populatePathNames(
     ClientDatanodeProtocol dnClient = getDataNodeProxy(dnAddress);
     String volumeNameJson = dnClient.getDiskBalancerSetting(
         DiskBalancerConstants.DISKBALANCER_VOLUME_NAME);
-    ObjectMapper mapper = new ObjectMapper();
 
     @SuppressWarnings("unchecked")
     Map volumeMap =
-        mapper.readValue(volumeNameJson, HashMap.class);
+        READER.readValue(volumeNameJson);
     for (DiskBalancerVolumeSet set : node.getVolumeSets().values()) {
       for (DiskBalancerVolume vol : set.getVolumes()) {
         if (volumeMap.containsKey(vol.getUuid())) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
index cc796486e1..b47beff490 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
@@ -18,12 +18,14 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
 
 import com.google.common.base.Preconditions;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
     .DiskBalancerDataNode;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.File;
 import java.net.URL;
@@ -35,6 +37,8 @@
 public class JsonNodeConnector implements ClusterConnector {
   private static final Logger LOG =
       LoggerFactory.getLogger(JsonNodeConnector.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(DiskBalancerCluster.class);
   private final URL clusterURI;
 
   /**
@@ -56,9 +60,7 @@ public List getNodes() throws Exception {
     Preconditions.checkNotNull(this.clusterURI);
     String dataFilePath = this.clusterURI.getPath();
     LOG.info("Reading cluster info from file : " + dataFilePath);
-    ObjectMapper mapper = new ObjectMapper();
-    DiskBalancerCluster cluster =
-        mapper.readValue(new File(dataFilePath), DiskBalancerCluster.class);
+    DiskBalancerCluster cluster = READER.readValue(new File(dataFilePath));
     String message = String.format("Found %d node(s)",
         cluster.getNodes().size());
     LOG.info(message);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
index 17a6ebbd79..8d7fb2d9eb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.datamodel;
 
 import com.google.common.base.Preconditions;
+
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -25,9 +26,11 @@
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.codehaus.jackson.annotate.JsonIgnore;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.File;
 import java.io.IOException;
@@ -69,6 +72,8 @@
 public class DiskBalancerCluster {
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerCluster.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(DiskBalancerCluster.class);
   private final Set exclusionList;
   private final Set inclusionList;
   private ClusterConnector clusterConnector;
@@ -118,8 +123,7 @@ public DiskBalancerCluster(ClusterConnector connector) throws IOException {
    * @throws IOException
    */
   public static DiskBalancerCluster parseJson(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.readValue(json, DiskBalancerCluster.class);
+    return READER.readValue(json);
   }
 
   /**
@@ -232,8 +236,7 @@ public void setInclusionList(Set includeNodes) {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.writeValueAsString(this);
+    return JsonUtil.toJsonString(this);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
index a6a8bdcd0a..8b627b0f22 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
@@ -19,9 +19,11 @@
 
 import com.google.common.base.Preconditions;
 
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.codehaus.jackson.annotate.JsonIgnore;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 
@@ -30,6 +32,9 @@
  */
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class DiskBalancerVolume {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(DiskBalancerVolume.class);
+
   private String path;
   private long capacity;
   private String storageType;
@@ -58,8 +63,7 @@ public DiskBalancerVolume() {
    * @throws IOException
    */
   public static DiskBalancerVolume parseJson(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.readValue(json, DiskBalancerVolume.class);
+    return READER.readValue(json);
  }
 
   /**
@@ -305,8 +309,7 @@ public long computeEffectiveCapacity() {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    return mapper.writeValueAsString(this);
+    return JsonUtil.toJsonString(this);
   }
 
   /**
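Note on the pattern above: the patch replaces per-call new ObjectMapper() construction with ObjectMapper/ObjectReader instances held in static final fields. Both are thread-safe once configured, so one shared instance per class is enough and avoids building a new mapper on every serialize or parse call. Below is a minimal sketch of the same pattern against the Jackson 1.x (org.codehaus.jackson) API used by the patch; the CachedJsonBean class and its name field are hypothetical, for illustration only, not part of the patch.

import java.io.IOException;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;

public class CachedJsonBean {
  // Built once per class and reused by every call; safe to share because the
  // configuration is not mutated after construction.
  private static final ObjectMapper MAPPER = new ObjectMapper();
  private static final ObjectReader READER =
      new ObjectMapper().reader(CachedJsonBean.class);

  private String name;    // illustrative bean property

  public String getName() { return name; }
  public void setName(String name) { this.name = name; }

  // Serialize this bean with the shared mapper.
  public String toJson() throws IOException {
    return MAPPER.writeValueAsString(this);
  }

  // Parse JSON back into a bean with the shared, type-bound reader.
  public static CachedJsonBean parseJson(String json) throws IOException {
    return READER.readValue(json);
  }
}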