From 6b4f40cbf913c048fb33da6c5d8271cd77766d0a Mon Sep 17 00:00:00 2001 From: Alejandro Abdelnur Date: Wed, 21 Dec 2011 05:26:20 +0000 Subject: [PATCH] HttpFS server should check that upload requests have correct content-type. (tucu) git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1221616 13f79535-47bb-0310-9956-ffa450edef68 --- .../fs/http/client/HttpFSFileSystem.java | 4 +- .../server/CheckUploadContentTypeFilter.java | 112 ++++++++++++++++++ .../src/main/webapp/WEB-INF/web.xml | 10 ++ .../TestCheckUploadContentTypeFilter.java | 91 ++++++++++++++ hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 + 5 files changed, 219 insertions(+), 1 deletion(-) create mode 100644 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java create mode 100644 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java index 520c7325fa..03c8548813 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java @@ -98,6 +98,8 @@ public class HttpFSFileSystem extends FileSystem { public static final String SET_REPLICATION_JSON = "boolean"; + public static final String UPLOAD_CONTENT_TYPE= "application/octet-stream"; + public static enum FILE_TYPE { FILE, DIRECTORY, SYMLINK; @@ -459,7 +461,7 @@ private FSDataOutputStream uploadData(String method, Path f, Map String location = conn.getHeaderField("Location"); if (location != null) { conn = getConnection(new URL(location), method); - conn.setRequestProperty("Content-Type", "application/octet-stream"); + 
conn.setRequestProperty("Content-Type", UPLOAD_CONTENT_TYPE); try { OutputStream os = new BufferedOutputStream(conn.getOutputStream(), bufferSize); return new HttpFSDataOutputStream(conn, os, expectedStatus, statistics); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java new file mode 100644 index 0000000000..7e73666f58 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java @@ -0,0 +1,112 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.fs.http.server; + + +import org.apache.hadoop.fs.http.client.HttpFSFileSystem; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.net.InetAddress; +import java.util.HashSet; +import java.util.Set; + +/** + * Filter that Enforces the content-type to be application/octet-stream for + * POST and PUT requests. + */ +public class CheckUploadContentTypeFilter implements Filter { + + private static final Set UPLOAD_OPERATIONS = new HashSet(); + + static { + UPLOAD_OPERATIONS.add(HttpFSFileSystem.PostOpValues.APPEND.toString()); + UPLOAD_OPERATIONS.add(HttpFSFileSystem.PutOpValues.CREATE.toString()); + } + + /** + * Initializes the filter. + *

+ * This implementation is a NOP. + * + * @param config filter configuration. + * + * @throws ServletException thrown if the filter could not be initialized. + */ + @Override + public void init(FilterConfig config) throws ServletException { + } + + /** + * Enforces the content-type to be application/octet-stream for + * POST and PUT requests. + * + * @param request servlet request. + * @param response servlet response. + * @param chain filter chain. + * + * @throws IOException thrown if an IO error occurs. + * @throws ServletException thrown if a servlet error occurs. + */ + @Override + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) + throws IOException, ServletException { + boolean contentTypeOK = true; + HttpServletRequest httpReq = (HttpServletRequest) request; + HttpServletResponse httpRes = (HttpServletResponse) response; + String method = httpReq.getMethod(); + if (method.equals("PUT") || method.equals("POST")) { + String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM); + if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) { + if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParams.DataParam.NAME))) { + String contentType = httpReq.getContentType(); + contentTypeOK = + HttpFSFileSystem.UPLOAD_CONTENT_TYPE.equalsIgnoreCase(contentType); + } + } + } + if (contentTypeOK) { + chain.doFilter(httpReq, httpRes); + } + else { + httpRes.sendError(HttpServletResponse.SC_BAD_REQUEST, + "Data upload requests must have content-type set to '" + + HttpFSFileSystem.UPLOAD_CONTENT_TYPE + "'"); + + } + } + + /** + * Destroys the filter. + *

+ * This implementation is a NOP. + */ + @Override + public void destroy() { + } + +} diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml index 3ba374e369..4d5e976fc5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/webapp/WEB-INF/web.xml @@ -60,6 +60,11 @@ org.apache.hadoop.lib.servlet.HostnameFilter + + checkUploadContentType + org.apache.hadoop.fs.http.server.CheckUploadContentTypeFilter + + fsReleaseFilter org.apache.hadoop.fs.http.server.HttpFSReleaseFilter @@ -80,6 +85,11 @@ * + + checkUploadContentType + * + + fsReleaseFilter * diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java new file mode 100644 index 0000000000..89497a4e04 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java @@ -0,0 +1,91 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs.http.server; + +import org.apache.hadoop.fs.http.client.HttpFSFileSystem; +import org.junit.Test; +import org.mockito.Mockito; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +public class TestCheckUploadContentTypeFilter { + + @Test + public void putUpload() throws Exception { + test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "application/octet-stream", true, false); + } + + @Test + public void postUpload() throws Exception { + test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "APPLICATION/OCTET-STREAM", true, false); + } + + @Test + public void putUploadWrong() throws Exception { + test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", false, false); + test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", true, true); + } + + @Test + public void postUploadWrong() throws Exception { + test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", false, false); + test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", true, true); + } + + @Test + public void getOther() throws Exception { + test("GET", HttpFSFileSystem.GetOpValues.GETHOMEDIR.toString(), "plain/text", false, false); + } + + @Test + public void putOther() throws Exception { + test("PUT", HttpFSFileSystem.PutOpValues.MKDIRS.toString(), "plain/text", false, false); + } + + private void test(String method, String operation, String contentType, + boolean upload, boolean error) throws Exception { + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + HttpServletResponse response = Mockito.mock(HttpServletResponse.class); + Mockito.reset(request); + Mockito.when(request.getMethod()).thenReturn(method); + 
Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).thenReturn(operation); + Mockito.when(request.getParameter(HttpFSParams.DataParam.NAME)). + thenReturn(Boolean.toString(upload)); + Mockito.when(request.getContentType()).thenReturn(contentType); + + FilterChain chain = Mockito.mock(FilterChain.class); + + Filter filter = new CheckUploadContentTypeFilter(); + + filter.doFilter(request, response, chain); + + if (error) { + Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST), + Mockito.contains("Data upload")); + } + else { + Mockito.verify(chain).doFilter(request, response); + } + } + + +} diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index c6aedddd59..df916217d0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -185,6 +185,9 @@ Trunk (unreleased changes) HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk. (tucu) + HttpFS server should check that upload requests have correct + content-type. (tucu) + Release 0.23.1 - UNRELEASED INCOMPATIBLE CHANGES