HDFS-6169. Move the address in WebImageViewer. Contributed by Akira Ajisaka.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1585802 13f79535-47bb-0310-9956-ffa450edef68
parent 245012a9d9
commit 7915b36225
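For context, the practical effect of this patch is that WebImageViewer now answers under the WebHDFS path prefix (/webhdfs/v1), so an ordinary WebHdfsFileSystem client can browse an offline fsimage. The following is a minimal sketch modeled on the new testWebImageViewer() code later in this diff; the class name and the host/port (localhost:5978) are illustrative assumptions, not part of the patch.

// Sketch only: lists the root of an fsimage served by a running WebImageViewer.
// Assumes the viewer was started separately and that localhost:5978 is its address.
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListFsImageExample {
  public static void main(String[] args) throws Exception {
    // The webhdfs:// scheme works because WebImageViewer now serves /webhdfs/v1
    URI uri = new URI("webhdfs://localhost:5978");
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(uri, conf);
    for (FileStatus status : fs.listStatus(new Path("/"))) {
      System.out.println(status.getPath() + " " + status.getLen());
    }
    fs.close();
  }
}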
CHANGES.txt
@@ -320,6 +320,8 @@ Release 2.5.0 - UNRELEASED
    HDFS-6143. WebHdfsFileSystem open should throw FileNotFoundException for
    non-existing paths. (Gera Shegalov via wheat9)

    HDFS-6169. Move the address in WebImageViewer. (Akira Ajisaka via wheat9)

Release 2.4.1 - UNRELEASED

  INCOMPATIBLE CHANGES

FSImageHandler.java
@@ -17,7 +17,10 @@
 */
package org.apache.hadoop.hdfs.tools.offlineImageViewer;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -48,48 +51,81 @@ public FSImageHandler(FSImageLoader loader) throws IOException {
  @Override
  public void messageReceived(
      ChannelHandlerContext ctx, MessageEvent e) throws Exception {
    String op = getOp(e);
    try {
      String path = getPath(e);
      handleOperation(op, path, e);
    } catch (Exception ex) {
      notFoundResponse(e);
      LOG.warn(ex.getMessage());
    } finally {
      e.getFuture().addListener(ChannelFutureListener.CLOSE);
    }
  }

  /** return the op parameter in upper case */
  private String getOp(MessageEvent e) {
    Map<String, List<String>> parameters = getDecoder(e).getParameters();
    if (parameters.containsKey("op")) {
      return parameters.get("op").get(0).toUpperCase();
    } else {
      // return "" to avoid NPE
      return "";
    }
  }

  private String getPath(MessageEvent e) throws FileNotFoundException {
    String path = getDecoder(e).getPath();
    // trim "/webhdfs/v1" to keep compatibility with WebHDFS API
    if (path.startsWith("/webhdfs/v1/")) {
      return path.replaceFirst("/webhdfs/v1", "");
    } else {
      throw new FileNotFoundException("Path: " + path + " should " +
          "start with \"/webhdfs/v1/\"");
    }
  }

  private QueryStringDecoder getDecoder(MessageEvent e) {
    HttpRequest request = (HttpRequest) e.getMessage();
    return new QueryStringDecoder(request.getUri());
  }

  private void handleOperation(String op, String path, MessageEvent e)
      throws IOException {
    HttpRequest request = (HttpRequest) e.getMessage();
    HttpResponse response = new DefaultHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
    response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
        "application/json");
    String content = null;

    if (request.getMethod() == HttpMethod.GET){
      String uri = request.getUri();
      QueryStringDecoder decoder = new QueryStringDecoder(uri);

      String op = "null";
      if (decoder.getParameters().containsKey("op")) {
        op = decoder.getParameters().get("op").get(0).toUpperCase();
      }
      HttpResponse response = new DefaultHttpResponse(
          HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
      String json = null;

      if (op.equals("LISTSTATUS")) {
        try {
          json = loader.listStatus(decoder.getPath());
          response.setStatus(HttpResponseStatus.OK);
          response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
              "application/json");
          HttpHeaders.setContentLength(response, json.length());
        } catch (Exception ex) {
          LOG.warn(ex.getMessage());
          response.setStatus(HttpResponseStatus.NOT_FOUND);
        }
        content = loader.listStatus(path);
      } else {
        response.setStatus(HttpResponseStatus.BAD_REQUEST);
      }

      e.getChannel().write(response);
      if (json != null) {
        e.getChannel().write(json);
      }
      LOG.info(response.getStatus().getCode() + " method=GET op=" + op
          + " target=" + decoder.getPath());
    } else {
      // only HTTP GET is allowed since fsimage is read-only.
      HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1,
          HttpResponseStatus.METHOD_NOT_ALLOWED);
      e.getChannel().write(response);
      LOG.info(response.getStatus().getCode() + " method="
          + request.getMethod().getName());
      response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
    }
    e.getFuture().addListener(ChannelFutureListener.CLOSE);

    if (content != null) {
      HttpHeaders.setContentLength(response, content.length());
    }
    e.getChannel().write(response);

    if (content != null) {
      e.getChannel().write(content);
    }

    LOG.info(response.getStatus().getCode() + " method="
        + request.getMethod().getName() + " op=" + op + " target=" + path);
  }

  private void notFoundResponse(MessageEvent e) {
    HttpResponse response = new DefaultHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND);
    e.getChannel().write(response);
  }
}
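As a companion to the handler above, here is a hedged sketch that probes the same status codes from a client's point of view: 200 for a LISTSTATUS under /webhdfs/v1, 404 for a path outside that prefix, 400 for an unknown op, and 405 for any method other than GET. It mirrors the assertions in the test changes further down; the port 5978 and the class name are assumptions for illustration only.

// Sketch only: issues raw HTTP requests against an assumed WebImageViewer
// at localhost:5978 and prints the status code for each case handled by
// FSImageHandler above.
import java.net.HttpURLConnection;
import java.net.URL;

public class FsImageHandlerProbe {
  private static int status(String spec, String method) throws Exception {
    HttpURLConnection conn =
        (HttpURLConnection) new URL(spec).openConnection();
    conn.setRequestMethod(method);
    conn.connect();
    int code = conn.getResponseCode();
    conn.disconnect();
    return code;
  }

  public static void main(String[] args) throws Exception {
    String base = "http://localhost:5978";
    // valid path and op -> 200 with a JSON FileStatuses body
    System.out.println(status(base + "/webhdfs/v1/?op=LISTSTATUS", "GET"));
    // path outside /webhdfs/v1 -> 404 (notFoundResponse)
    System.out.println(status(base + "/invalid/?op=LISTSTATUS", "GET"));
    // unknown op -> 400 (BAD_REQUEST)
    System.out.println(status(base + "/webhdfs/v1/?op=INVALID", "GET"));
    // non-GET method -> 405 (METHOD_NOT_ALLOWED)
    System.out.println(status(base + "/webhdfs/v1/?op=LISTSTATUS", "POST"));
  }
}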

FSImageLoader.java
@@ -271,7 +271,7 @@ private long getINodeId(String strPath) {
    long id = INodeId.ROOT_INODE_ID;
    for (int i = 1; i < nameList.length; i++) {
      long[] children = dirmap.get(id);
      Preconditions.checkNotNull(children, "The specified path: " +
      Preconditions.checkNotNull(children, "File: " +
          strPath + " is not found in the fsimage.");
      String cName = nameList[i];
      boolean findChildren = false;
@@ -282,7 +282,7 @@ private long getINodeId(String strPath) {
          break;
        }
      }
      Preconditions.checkArgument(findChildren, "The specified path: " +
      Preconditions.checkArgument(findChildren, "File: " +
          strPath + " is not found in the fsimage.");
    }
    return id;

TestOfflineImageViewer.java
@@ -29,12 +29,12 @@
import java.io.StringReader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -55,13 +55,11 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.token.Token;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
@@ -246,56 +244,68 @@ public void testPBImageXmlWriter() throws IOException, SAXException,
  }

  @Test
  public void testWebImageViewer() throws IOException, InterruptedException {
  public void testWebImageViewer() throws IOException, InterruptedException,
      URISyntaxException {
    WebImageViewer viewer = new WebImageViewer(
        NetUtils.createSocketAddr("localhost:0"));
    try {
      viewer.initServer(originalFsimage.getAbsolutePath());
      int port = viewer.getPort();

      // 1. LISTSTATUS operation to a valid path
      URL url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
      // create a WebHdfsFileSystem instance
      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
      Configuration conf = new Configuration();
      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);

      // verify the number of directories
      FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
      assertEquals(NUM_DIRS, statuses.length);

      // verify the number of files in the directory
      statuses = webhdfs.listStatus(new Path("/dir0"));
      assertEquals(FILES_PER_DIR, statuses.length);

      // compare a file
      FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
      FileStatus expected = writtenFiles.get("/dir0/file0");
      assertEquals(expected.getAccessTime(), status.getAccessTime());
      assertEquals(expected.getBlockSize(), status.getBlockSize());
      assertEquals(expected.getGroup(), status.getGroup());
      assertEquals(expected.getLen(), status.getLen());
      assertEquals(expected.getModificationTime(),
          status.getModificationTime());
      assertEquals(expected.getOwner(), status.getOwner());
      assertEquals(expected.getPermission(), status.getPermission());
      assertEquals(expected.getReplication(), status.getReplication());
      assertEquals(expected.isDirectory(), status.isDirectory());

      // LISTSTATUS operation to a invalid path
      URL url = new URL("http://localhost:" + port +
          "/webhdfs/v1/invalid/?op=LISTSTATUS");
      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
      connection.setRequestMethod("GET");
      connection.connect();
      assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
      assertEquals("application/json", connection.getContentType());
      assertEquals(HttpURLConnection.HTTP_NOT_FOUND,
          connection.getResponseCode());

      String content = org.apache.commons.io.IOUtils.toString(
          connection.getInputStream());
      LOG.info("content: " + content);

      // verify the number of directories listed
      ObjectMapper mapper = new ObjectMapper();
      Map<String, Map<String, List<Map<String, Object>>>> fileStatuses =
          mapper.readValue(content, new TypeReference
          <Map<String, Map<String, List<Map<String, Object>>>>>(){});
      List<Map<String, Object>> fileStatusList = fileStatuses
          .get("FileStatuses").get("FileStatus");
      assertEquals(NUM_DIRS, fileStatusList.size());

      // verify the number of files in a directory
      Map<String, Object> fileStatusMap = fileStatusList.get(0);
      assertEquals(FILES_PER_DIR, fileStatusMap.get("childrenNum"));

      // 2. LISTSTATUS operation to a invalid path
      url = new URL("http://localhost:" + port + "/invalid/?op=LISTSTATUS");
      // LISTSTATUS operation to a invalid prefix
      url = new URL("http://localhost:" + port + "/webhdfs/v1?op=LISTSTATUS");
      connection = (HttpURLConnection) url.openConnection();
      connection.setRequestMethod("GET");
      connection.connect();
      assertEquals(HttpURLConnection.HTTP_NOT_FOUND,
          connection.getResponseCode());

      // 3. invalid operation
      url = new URL("http://localhost:" + port + "/?op=INVALID");
      // invalid operation
      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
      connection = (HttpURLConnection) url.openConnection();
      connection.setRequestMethod("GET");
      connection.connect();
      assertEquals(HttpURLConnection.HTTP_BAD_REQUEST,
          connection.getResponseCode());

      // 4. invalid method
      url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
      // invalid method
      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
      connection = (HttpURLConnection) url.openConnection();
      connection.setRequestMethod("POST");
      connection.connect();