diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index b05faa65d6..508f9ac036 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -266,7 +266,10 @@ Release 2.5.0 - UNRELEASED
HDFS-6119. FSNamesystem code cleanup. (suresh)
- HDFS-6158. Clean up dead code for OfflineImageViewer (wheat9)
+ HDFS-6158. Clean up dead code for OfflineImageViewer. (wheat9)
+
+ HDFS-5978. Create a tool to take fsimage and expose read-only WebHDFS API.
+ (Akira Ajisaka via wheat9)
OPTIMIZATIONS
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 26ea55b4a3..442cf58284 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -179,7 +179,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
- <scope>test</scope>
+ <scope>compile</scope>
</dependency>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
new file mode 100644
index 0000000000..dc2c39ab4b
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.jboss.netty.channel.ChannelFutureListener;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
+import org.jboss.netty.handler.codec.http.HttpHeaders;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.jboss.netty.handler.codec.http.HttpRequest;
+import org.jboss.netty.handler.codec.http.HttpResponse;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.jboss.netty.handler.codec.http.HttpVersion;
+import org.jboss.netty.handler.codec.http.QueryStringDecoder;
+
+/**
+ * Implements the read-only WebHDFS API for fsimage.
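+ *
+ * Example (host and port are illustrative):
+ *   curl -i "http://host:port/?op=LISTSTATUS"
+ * returns the file statuses of the root directory as JSON; any HTTP method
+ * other than GET is answered with 405 METHOD NOT ALLOWED.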
+ */
+public class FSImageHandler extends SimpleChannelUpstreamHandler {
+ public static final Log LOG = LogFactory.getLog(FSImageHandler.class);
+ private final FSImageLoader loader;
+
+ public FSImageHandler(FSImageLoader loader) throws IOException {
+ this.loader = loader;
+ }
+
+ @Override
+ public void messageReceived(
+ ChannelHandlerContext ctx, MessageEvent e) throws Exception {
+ HttpRequest request = (HttpRequest) e.getMessage();
+ if (request.getMethod() == HttpMethod.GET){
+ String uri = request.getUri();
+ QueryStringDecoder decoder = new QueryStringDecoder(uri);
+
+ String op = "null";
+ if (decoder.getParameters().containsKey("op")) {
+ op = decoder.getParameters().get("op").get(0).toUpperCase();
+ }
+ HttpResponse response = new DefaultHttpResponse(
+ HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
+ String json = null;
+
+ if (op.equals("LISTSTATUS")) {
+ try {
+ json = loader.listStatus(decoder.getPath());
+ response.setStatus(HttpResponseStatus.OK);
+ response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
+ "application/json");
+ HttpHeaders.setContentLength(response, json.length());
+ } catch (Exception ex) {
+ LOG.warn(ex.getMessage());
+ response.setStatus(HttpResponseStatus.NOT_FOUND);
+ }
+ } else {
+ response.setStatus(HttpResponseStatus.BAD_REQUEST);
+ }
+
+ e.getChannel().write(response);
+ if (json != null) {
+ e.getChannel().write(json);
+ }
+ LOG.info(response.getStatus().getCode() + " method=GET op=" + op
+ + " target=" + decoder.getPath());
+ } else {
+ // only HTTP GET is allowed since fsimage is read-only.
+ HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1,
+ HttpResponseStatus.METHOD_NOT_ALLOWED);
+ e.getChannel().write(response);
+ LOG.info(response.getStatus().getCode() + " method="
+ + request.getMethod().getName());
+ }
+ e.getFuture().addListener(ChannelFutureListener.CLOSE);
+ }
+}
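The bootstrap that installs this handler into an HTTP pipeline is not part of this hunk. As a rough guide only, a minimal Netty 3.x wiring (the class name and port below are illustrative, not from the patch) pairs an HttpRequestDecoder with an HttpResponseEncoder and adds a StringEncoder so that the plain-String JSON writes above can reach the wire:

    import java.net.InetSocketAddress;
    import java.util.concurrent.Executors;

    import org.jboss.netty.bootstrap.ServerBootstrap;
    import org.jboss.netty.channel.ChannelPipeline;
    import org.jboss.netty.channel.ChannelPipelineFactory;
    import org.jboss.netty.channel.Channels;
    import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
    import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
    import org.jboss.netty.handler.codec.http.HttpResponseEncoder;
    import org.jboss.netty.handler.codec.string.StringEncoder;

    public class FSImageViewerSketch {
      public static void main(String[] args) throws Exception {
        // Load the fsimage once; the handler serves it read-only afterwards.
        final FSImageLoader loader = FSImageLoader.load(args[0]);
        ServerBootstrap bootstrap = new ServerBootstrap(
            new NioServerSocketChannelFactory(Executors.newCachedThreadPool(),
                Executors.newCachedThreadPool()));
        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
          @Override
          public ChannelPipeline getPipeline() throws Exception {
            ChannelPipeline p = Channels.pipeline();
            p.addLast("decoder", new HttpRequestDecoder());  // bytes -> HttpRequest
            p.addLast("encoder", new HttpResponseEncoder()); // HttpResponse -> bytes
            p.addLast("string", new StringEncoder());        // JSON String -> bytes
            p.addLast("handler", new FSImageHandler(loader));
            return p;
          }
        });
        bootstrap.bind(new InetSocketAddress(5978)); // illustrative port
      }
    }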
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
new file mode 100644
index 0000000000..f053e8e3be
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
@@ -0,0 +1,369 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.BufferedInputStream;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
+import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf;
+import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
+import org.apache.hadoop.hdfs.server.namenode.FsImageProto;
+import org.apache.hadoop.hdfs.server.namenode.INodeId;
+import org.apache.hadoop.io.IOUtils;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.io.LimitInputStream;
+
+/**
+ * FSImageLoader loads fsimage and provides methods to return the
+ * JSON-formatted file status of the namespace of the fsimage.
+ */
+public class FSImageLoader {
+ public static final Log LOG = LogFactory.getLog(FSImageLoader.class);
+
+ private static String[] stringTable;
+ private static Map<Long, FsImageProto.INodeSection.INode> inodes =
+ Maps.newHashMap();
+ private static Map<Long, long[]> dirmap = Maps.newHashMap();
+ private static List<FsImageProto.INodeReferenceSection.INodeReference>
+ refList = Lists.newArrayList();
+
+ private FSImageLoader() {}
+
+ /**
+ * Load the fsimage into memory.
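+ *
+ * <p>Typical use (the path is illustrative):
+ * <pre>
+ * FSImageLoader loader = FSImageLoader.load("/tmp/fsimage");
+ * String json = loader.listStatus("/");
+ * </pre>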
+ * @param inputFile the filepath of the fsimage to load.
+ * @return FSImageLoader
+ * @throws IOException if failed to load fsimage.
+ */
+ public static FSImageLoader load(String inputFile) throws IOException {
+ Configuration conf = new Configuration();
+ RandomAccessFile file = new RandomAccessFile(inputFile, "r");
+ if (!FSImageUtil.checkFileFormat(file)) {
+ throw new IOException("Unrecognized FSImage");
+ }
+
+ FsImageProto.FileSummary summary = FSImageUtil.loadSummary(file);
+ FileInputStream fin = null;
+ try {
+ fin = new FileInputStream(file.getFD());
+
+ ArrayList<FsImageProto.FileSummary.Section> sections =
+ Lists.newArrayList(summary.getSectionsList());
+ Collections.sort(sections,
+ new Comparator<FsImageProto.FileSummary.Section>() {
+ @Override
+ public int compare(FsImageProto.FileSummary.Section s1,
+ FsImageProto.FileSummary.Section s2) {
+ FSImageFormatProtobuf.SectionName n1 =
+ FSImageFormatProtobuf.SectionName.fromString(s1.getName());
+ FSImageFormatProtobuf.SectionName n2 =
+ FSImageFormatProtobuf.SectionName.fromString(s2.getName());
+ if (n1 == null) {
+ return n2 == null ? 0 : -1;
+ } else if (n2 == null) {
+ return 1;
+ } else {
+ return n1.ordinal() - n2.ordinal();
+ }
+ }
+ });
+
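+ // Visit sections in SectionName ordinal order so that dependencies are
+ // satisfied; in particular, inode references are loaded before the
+ // directory section, which resolves its reference children via refList.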
+ for (FsImageProto.FileSummary.Section s : sections) {
+ fin.getChannel().position(s.getOffset());
+ InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
+ summary.getCodec(), new BufferedInputStream(new LimitInputStream(
+ fin, s.getLength())));
+
+ switch (FSImageFormatProtobuf.SectionName.fromString(s.getName())) {
+ case STRING_TABLE:
+ loadStringTable(is);
+ break;
+ case INODE:
+ loadINodeSection(is);
+ break;
+ case INODE_REFERENCE:
+ loadINodeReferenceSection(is);
+ break;
+ case INODE_DIR:
+ loadINodeDirectorySection(is);
+ break;
+ default:
+ break;
+ }
+ }
+ } finally {
+ IOUtils.cleanup(null, fin);
+ }
+ return new FSImageLoader();
+ }
+
+ private static void loadINodeDirectorySection(InputStream in)
+ throws IOException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loading directory section");
+ }
+ while (true) {
+ FsImageProto.INodeDirectorySection.DirEntry e =
+ FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
+ // in is a LimitInputStream bounded to this section, so
+ // parseDelimitedFrom returns null at the end of the section
+ if (e == null) {
+ break;
+ }
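+ // direct children ids come first, followed by reference children
+ // resolved to the ids of the inodes they refer to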
+ long[] l = new long[e.getChildrenCount() + e.getRefChildrenCount()];
+ for (int i = 0; i < e.getChildrenCount(); ++i) {
+ l[i] = e.getChildren(i);
+ }
+ for (int i = e.getChildrenCount(); i < l.length; i++) {
+ int refId = e.getRefChildren(i - e.getChildrenCount());
+ l[i] = refList.get(refId).getReferredId();
+ }
+ dirmap.put(e.getParent(), l);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loaded directory (parent " + e.getParent()
+ + ") with " + e.getChildrenCount() + " children and "
+ + e.getRefChildrenCount() + " reference children");
+ }
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loaded " + dirmap.size() + " directories");
+ }
+ }
+
+ private static void loadINodeReferenceSection(InputStream in)
+ throws IOException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loading inode reference section");
+ }
+ while (true) {
+ FsImageProto.INodeReferenceSection.INodeReference e =
+ FsImageProto.INodeReferenceSection.INodeReference
+ .parseDelimitedFrom(in);
+ if (e == null) {
+ break;
+ }
+ refList.add(e);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Loaded inode reference named '" + e.getName()
+ + "' referring to id " + e.getReferredId());
+ }
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loaded " + refList.size() + " inode references");
+ }
+ }
+
+ private static void loadINodeSection(InputStream in) throws IOException {
+ FsImageProto.INodeSection s = FsImageProto.INodeSection
+ .parseDelimitedFrom(in);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Found " + s.getNumInodes() + " inodes in inode section");
+ }
+ for (int i = 0; i < s.getNumInodes(); ++i) {
+ FsImageProto.INodeSection.INode p = FsImageProto.INodeSection.INode
+ .parseDelimitedFrom(in);
+ inodes.put(p.getId(), p);
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Loaded inode id " + p.getId() + " type " + p.getType()
+ + " name '" + p.getName().toStringUtf8() + "'");
+ }
+ }
+ }
+
+ private static void loadStringTable(InputStream in) throws IOException {
+ FsImageProto.StringTableSection s = FsImageProto.StringTableSection
+ .parseDelimitedFrom(in);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Found " + s.getNumEntry() + " strings in string section");
+ }
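+ // string table entry ids are 1-based, so allocate one extra slot
+ // and leave index 0 unused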
+ stringTable = new String[s.getNumEntry() + 1];
+ for (int i = 0; i < s.getNumEntry(); ++i) {
+ FsImageProto.StringTableSection.Entry e = FsImageProto
+ .StringTableSection.Entry.parseDelimitedFrom(in);
+ stringTable[e.getId()] = e.getStr();
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("Loaded string " + e.getStr());
+ }
+ }
+ }
+
+ /**
+ * Returns the JSON-formatted list of the files in the specified directory.
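+ * The output is expected to follow the WebHDFS LISTSTATUS response shape
+ * (assumed here from the class's WebHDFS goal), e.g.
+ * {"FileStatuses":{"FileStatus":[ ... ]}}.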
+ * @param path the path of the directory to list
+ * @return the JSON-formatted file list of the directory
+ * @throws IOException if the file statuses cannot be serialized to JSON.
+ */
+ public String listStatus(String path) throws IOException {
+ StringBuilder sb = new StringBuilder();
+ ObjectMapper mapper = new ObjectMapper();
+ List