HDFS-3113 amendment, removing incorrectly committed files

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1365990 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2012-07-26 13:42:26 +00:00
parent 4d4560189a
commit c5fe08e0ea
3 changed files with 0 additions and 867 deletions

org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java View File

@@ -1,513 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.http.client;

import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir;
import org.apache.hadoop.test.TestDirHelper;
import org.apache.hadoop.test.TestHdfs;
import org.apache.hadoop.test.TestHdfsHelper;
import org.apache.hadoop.test.TestJetty;
import org.apache.hadoop.test.TestJettyHelper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;

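/**
 * End-to-end tests for {@link HttpFSFileSystem}. Each {@link Operation} value
 * becomes one parameterized test case that runs the corresponding file system
 * operation against an embedded HttpFS (Jetty) server backed by a test HDFS
 * cluster, both directly and via a proxy-user doAs call.
 */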
@RunWith(value = Parameterized.class)
public class TestHttpFSFileSystem extends HFSTestCase {
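
  /**
   * Starts an HttpFS server for the current test: creates the server conf,
   * log and temp directories, writes the signature secret, hdfs-site.xml and
   * httpfs-site.xml files the server expects, and deploys the HttpFS webapp
   * at /webhdfs on the test Jetty server.
   */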
  private void createHttpFSServer() throws Exception {
    File homeDir = TestDirHelper.getTestDir();
    Assert.assertTrue(new File(homeDir, "conf").mkdir());
    Assert.assertTrue(new File(homeDir, "log").mkdir());
    Assert.assertTrue(new File(homeDir, "temp").mkdir());
    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

    File secretFile = new File(new File(homeDir, "conf"), "secret");
    Writer w = new FileWriter(secretFile);
    w.write("secret");
    w.close();

    // HDFS configuration
    String fsDefaultName = TestHdfsHelper.getHdfsConf()
      .get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
    File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.writeXml(os);
    os.close();

    // HttpFS configuration
    conf = new Configuration(false);
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.writeXml(os);
    os.close();

    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL url = cl.getResource("webapp");
    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
    Server server = TestJettyHelper.getJettyServer();
    server.addHandler(context);
    server.start();
  }

  protected FileSystem getHttpFileSystem() throws Exception {
    Configuration conf = new Configuration();
    conf.set("fs.http.impl", HttpFSFileSystem.class.getName());
    return FileSystem.get(TestJettyHelper.getJettyURL().toURI(), conf);
  }

  protected void testGet() throws Exception {
    FileSystem fs = getHttpFileSystem();
    Assert.assertNotNull(fs);
    Assert.assertEquals(fs.getUri(), TestJettyHelper.getJettyURL().toURI());
    fs.close();
  }

  private void testOpen() throws Exception {
    // write a one-byte file through HDFS, then read it back through HttpFS
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    fs.close();
    fs = getHttpFileSystem();
    InputStream is = fs.open(new Path(path.toUri().getPath()));
    Assert.assertEquals(is.read(), 1);
    is.close();
    fs.close();
  }

  private void testCreate(Path path, boolean override) throws Exception {
    FileSystem fs = getHttpFileSystem();
    FsPermission permission = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
    OutputStream os = fs.create(new Path(path.toUri().getPath()), permission, override, 1024,
                                (short) 2, 100 * 1024 * 1024, null);
    os.write(1);
    os.close();
    fs.close();
    // verify replication, block size and permission through HDFS
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    FileStatus status = fs.getFileStatus(path);
    Assert.assertEquals(status.getReplication(), 2);
    Assert.assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
    Assert.assertEquals(status.getPermission(), permission);
    InputStream is = fs.open(path);
    Assert.assertEquals(is.read(), 1);
    is.close();
    fs.close();
  }

  private void testCreate() throws Exception {
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    testCreate(path, false);
    testCreate(path, true);
    try {
      // creating an existing file without overwrite must fail
      testCreate(path, false);
      Assert.fail();
    } catch (IOException ex) {
      // expected
    } catch (Exception ex) {
      Assert.fail();
    }
  }

  private void testAppend() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    fs.close();
    fs = getHttpFileSystem();
    os = fs.append(new Path(path.toUri().getPath()));
    os.write(2);
    os.close();
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    InputStream is = fs.open(path);
    Assert.assertEquals(is.read(), 1);
    Assert.assertEquals(is.read(), 2);
    Assert.assertEquals(is.read(), -1);
    is.close();
    fs.close();
  }

  private void testRename() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
    fs.mkdirs(path);
    fs.close();
    fs = getHttpFileSystem();
    Path oldPath = new Path(path.toUri().getPath());
    Path newPath = new Path(path.getParent(), "bar");
    fs.rename(oldPath, newPath);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Assert.assertFalse(fs.exists(oldPath));
    Assert.assertTrue(fs.exists(newPath));
    fs.close();
  }

  private void testDelete() throws Exception {
    Path foo = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
    Path bar = new Path(TestHdfsHelper.getHdfsTestDir(), "bar");
    Path foe = new Path(TestHdfsHelper.getHdfsTestDir(), "foe");
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    fs.mkdirs(foo);
    fs.mkdirs(new Path(bar, "a"));
    fs.mkdirs(foe);
    FileSystem hoopFs = getHttpFileSystem();
    Assert.assertTrue(hoopFs.delete(new Path(foo.toUri().getPath()), false));
    Assert.assertFalse(fs.exists(foo));
    try {
      // non-recursive delete of a non-empty directory must fail
      hoopFs.delete(new Path(bar.toUri().getPath()), false);
      Assert.fail();
    } catch (IOException ex) {
      // expected
    } catch (Exception ex) {
      Assert.fail();
    }
    Assert.assertTrue(fs.exists(bar));
    Assert.assertTrue(hoopFs.delete(new Path(bar.toUri().getPath()), true));
    Assert.assertFalse(fs.exists(bar));
    Assert.assertTrue(fs.exists(foe));
    Assert.assertTrue(hoopFs.delete(foe, true));
    Assert.assertFalse(fs.exists(foe));
    hoopFs.close();
    fs.close();
  }

  private void testListStatus() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    FileStatus status1 = fs.getFileStatus(path);
    fs.close();
    fs = getHttpFileSystem();
    FileStatus status2 = fs.getFileStatus(new Path(path.toUri().getPath()));
    Assert.assertEquals(status2.getPermission(), status1.getPermission());
    Assert.assertEquals(status2.getPath().toUri().getPath(), status1.getPath().toUri().getPath());
    Assert.assertEquals(status2.getReplication(), status1.getReplication());
    Assert.assertEquals(status2.getBlockSize(), status1.getBlockSize());
    Assert.assertEquals(status2.getAccessTime(), status1.getAccessTime());
    Assert.assertEquals(status2.getModificationTime(), status1.getModificationTime());
    Assert.assertEquals(status2.getOwner(), status1.getOwner());
    Assert.assertEquals(status2.getGroup(), status1.getGroup());
    Assert.assertEquals(status2.getLen(), status1.getLen());
    FileStatus[] stati = fs.listStatus(path.getParent());
    Assert.assertEquals(stati.length, 1);
    Assert.assertEquals(stati[0].getPath().getName(), path.getName());
    fs.close();
  }

  private void testWorkingdirectory() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path workingDir = fs.getWorkingDirectory();
    fs.close();
    fs = getHttpFileSystem();
    Path hoopWorkingDir = fs.getWorkingDirectory();
    fs.close();
    Assert.assertEquals(hoopWorkingDir.toUri().getPath(), workingDir.toUri().getPath());
    fs = getHttpFileSystem();
    fs.setWorkingDirectory(new Path("/tmp"));
    workingDir = fs.getWorkingDirectory();
    fs.close();
    Assert.assertEquals(workingDir.toUri().getPath(), new Path("/tmp").toUri().getPath());
  }

  private void testMkdirs() throws Exception {
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo");
    FileSystem fs = getHttpFileSystem();
    fs.mkdirs(path);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Assert.assertTrue(fs.exists(path));
    fs.close();
  }

  private void testSetTimes() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    FileStatus status1 = fs.getFileStatus(path);
    fs.close();
    long at = status1.getAccessTime();
    long mt = status1.getModificationTime();
    fs = getHttpFileSystem();
    fs.setTimes(path, mt + 10, at + 20);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    status1 = fs.getFileStatus(path);
    fs.close();
    long atNew = status1.getAccessTime();
    long mtNew = status1.getModificationTime();
    Assert.assertEquals(mtNew, mt + 10);
    Assert.assertEquals(atNew, at + 20);
  }

  private void testSetPermission() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foodir");
    fs.mkdirs(path);
    fs = getHttpFileSystem();
    FsPermission permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE);
    fs.setPermission(path, permission1);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    FileStatus status1 = fs.getFileStatus(path);
    fs.close();
    FsPermission permission2 = status1.getPermission();
    Assert.assertEquals(permission2, permission1);

    // sticky bit
    fs = getHttpFileSystem();
    permission1 = new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE, true);
    fs.setPermission(path, permission1);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    status1 = fs.getFileStatus(path);
    fs.close();
    permission2 = status1.getPermission();
    Assert.assertTrue(permission2.getStickyBit());
    Assert.assertEquals(permission2, permission1);
  }

  private void testSetOwner() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    fs.close();
    fs = getHttpFileSystem();
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[1];
    String group = HadoopUsersConfTestHelper.getHadoopUserGroups(user)[0];
    fs.setOwner(path, user, group);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    FileStatus status1 = fs.getFileStatus(path);
    fs.close();
    Assert.assertEquals(status1.getOwner(), user);
    Assert.assertEquals(status1.getGroup(), group);
  }

  private void testSetReplication() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    fs.setReplication(path, (short) 2);
    fs.close();
    fs = getHttpFileSystem();
    fs.setReplication(path, (short) 1);
    fs.close();
    fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    FileStatus status1 = fs.getFileStatus(path);
    fs.close();
    Assert.assertEquals(status1.getReplication(), (short) 1);
  }

  private void testChecksum() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    FileChecksum hdfsChecksum = fs.getFileChecksum(path);
    fs.close();
    fs = getHttpFileSystem();
    FileChecksum httpChecksum = fs.getFileChecksum(path);
    fs.close();
    Assert.assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
    Assert.assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
    Assert.assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
  }

  private void testContentSummary() throws Exception {
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    Path path = new Path(TestHdfsHelper.getHdfsTestDir(), "foo.txt");
    OutputStream os = fs.create(path);
    os.write(1);
    os.close();
    ContentSummary hdfsContentSummary = fs.getContentSummary(path);
    fs.close();
    fs = getHttpFileSystem();
    ContentSummary httpContentSummary = fs.getContentSummary(path);
    fs.close();
    Assert.assertEquals(httpContentSummary.getDirectoryCount(), hdfsContentSummary.getDirectoryCount());
    Assert.assertEquals(httpContentSummary.getFileCount(), hdfsContentSummary.getFileCount());
    Assert.assertEquals(httpContentSummary.getLength(), hdfsContentSummary.getLength());
    Assert.assertEquals(httpContentSummary.getQuota(), hdfsContentSummary.getQuota());
    Assert.assertEquals(httpContentSummary.getSpaceConsumed(), hdfsContentSummary.getSpaceConsumed());
    Assert.assertEquals(httpContentSummary.getSpaceQuota(), hdfsContentSummary.getSpaceQuota());
  }

  protected enum Operation {
    GET, OPEN, CREATE, APPEND, RENAME, DELETE, LIST_STATUS, WORKING_DIRECTORY, MKDIRS,
    SET_TIMES, SET_PERMISSION, SET_OWNER, SET_REPLICATION, CHECKSUM, CONTENT_SUMMARY
  }

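  // Dispatches a parameterized Operation value to the matching test method.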
  private void operation(Operation op) throws Exception {
    switch (op) {
      case GET:
        testGet();
        break;
      case OPEN:
        testOpen();
        break;
      case CREATE:
        testCreate();
        break;
      case APPEND:
        testAppend();
        break;
      case RENAME:
        testRename();
        break;
      case DELETE:
        testDelete();
        break;
      case LIST_STATUS:
        testListStatus();
        break;
      case WORKING_DIRECTORY:
        testWorkingdirectory();
        break;
      case MKDIRS:
        testMkdirs();
        break;
      case SET_TIMES:
        testSetTimes();
        break;
      case SET_PERMISSION:
        testSetPermission();
        break;
      case SET_OWNER:
        testSetOwner();
        break;
      case SET_REPLICATION:
        testSetReplication();
        break;
      case CHECKSUM:
        testChecksum();
        break;
      case CONTENT_SUMMARY:
        testContentSummary();
        break;
    }
  }

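  // Supplies the JUnit Parameterized runner with one Object[] per Operation
  // value, so every operation runs as its own test case.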
  @Parameterized.Parameters
  public static Collection operations() {
    Object[][] ops = new Object[Operation.values().length][];
    for (int i = 0; i < Operation.values().length; i++) {
      ops[i] = new Object[]{Operation.values()[i]};
    }
    return Arrays.asList(ops);
    // return Arrays.asList(new Object[][]{ new Object[]{Operation.CREATE}});
  }

  private Operation operation;

  public TestHttpFSFileSystem(Operation operation) {
    this.operation = operation;
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void testOperation() throws Exception {
    createHttpFSServer();
    operation(operation);
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void testOperationDoAs() throws Exception {
    createHttpFSServer();
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(HadoopUsersConfTestHelper.getHadoopUsers()[0],
                                                                    UserGroupInformation.getCurrentUser());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        operation(operation);
        return null;
      }
    });
  }

}

org/apache/hadoop/fs/http/server/TestHttpFSServer.java View File

@@ -1,236 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.http.server;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.lib.server.Service;
import org.apache.hadoop.lib.server.ServiceException;
import org.apache.hadoop.lib.service.Groups;
import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir;
import org.apache.hadoop.test.TestDirHelper;
import org.apache.hadoop.test.TestHdfs;
import org.apache.hadoop.test.TestHdfsHelper;
import org.apache.hadoop.test.TestJetty;
import org.apache.hadoop.test.TestJettyHelper;
import org.junit.Test;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;

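/**
 * HTTP-level tests for the HttpFS server: boots the server webapp and
 * exercises /webhdfs/v1 REST endpoints (instrumentation, liststatus with and
 * without a glob filter, and a PUT with no op parameter) directly over
 * HttpURLConnection, without going through a FileSystem client.
 */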
public class TestHttpFSServer extends HFSTestCase {

  @Test
  @TestDir
  @TestJetty
  public void server() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    Configuration httpfsConf = new Configuration(false);
    HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
    server.init();
    server.destroy();
  }

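  // Test double for the Groups service: resolves group membership from the
  // test helper configuration instead of the OS or HDFS.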
  public static class MockGroups implements Service, Groups {

    @Override
    public void init(org.apache.hadoop.lib.server.Server server) throws ServiceException {
    }

    @Override
    public void postInit() throws ServiceException {
    }

    @Override
    public void destroy() {
    }

    @Override
    public Class[] getServiceDependencies() {
      return new Class[0];
    }

    @Override
    public Class getInterface() {
      return Groups.class;
    }

    @Override
    public void serverStatusChange(org.apache.hadoop.lib.server.Server.Status oldStatus,
                                   org.apache.hadoop.lib.server.Server.Status newStatus)
      throws ServiceException {
    }

    @Override
    public List<String> getGroups(String user) throws IOException {
      return Arrays.asList(HadoopUsersConfTestHelper.getHadoopUserGroups(user));
    }
  }

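  /**
   * Starts an HttpFS server for the current test, wiring in MockGroups as an
   * extension service and writing the hdfs-site.xml, httpfs-site.xml and
   * signature secret files the server expects before deploying the webapp at
   * /webhdfs on the test Jetty server.
   */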
  private void createHttpFSServer() throws Exception {
    File homeDir = TestDirHelper.getTestDir();
    assertTrue(new File(homeDir, "conf").mkdir());
    assertTrue(new File(homeDir, "log").mkdir());
    assertTrue(new File(homeDir, "temp").mkdir());
    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());

    File secretFile = new File(new File(homeDir, "conf"), "secret");
    Writer w = new FileWriter(secretFile);
    w.write("secret");
    w.close();

    // HDFS configuration
    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
    hadoopConfDir.mkdirs();
    String fsDefaultName = TestHdfsHelper.getHdfsConf()
      .get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.writeXml(os);
    os.close();

    // HttpFS configuration
    conf = new Configuration(false);
    conf.set("httpfs.services.ext", MockGroups.class.getName());
    conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
      getHadoopUserGroups(HadoopUsersConfTestHelper.getHadoopUsers()[0])[0]);
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.writeXml(os);
    os.close();

    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL url = cl.getResource("webapp");
    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
    Server server = TestJettyHelper.getJettyServer();
    server.addHandler(context);
    server.start();
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void instrumentation() throws Exception {
    createHttpFSServer();

    // a user that is not in the admin group gets 401
    URL url = new URL(TestJettyHelper.getJettyURL(),
                      MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);

    // an admin user gets the counters JSON
    url = new URL(TestJettyHelper.getJettyURL(),
                  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
    conn = (HttpURLConnection) url.openConnection();
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
    String line = reader.readLine();
    reader.close();
    assertTrue(line.contains("\"counters\":{"));

    // the instrumentation operation is only valid on the root path
    url = new URL(TestJettyHelper.getJettyURL(),
                  MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation",
                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
    conn = (HttpURLConnection) url.openConnection();
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void testHdfsAccess() throws Exception {
    createHttpFSServer();
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    URL url = new URL(TestJettyHelper.getJettyURL(),
                      MessageFormat.format("/webhdfs/v1/?user.name={0}&op=liststatus", user));
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
    reader.readLine();
    reader.close();
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void testGlobFilter() throws Exception {
    createHttpFSServer();
    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
    fs.mkdirs(new Path("/tmp"));
    fs.create(new Path("/tmp/foo.txt")).close();
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    URL url = new URL(TestJettyHelper.getJettyURL(),
                      MessageFormat.format("/webhdfs/v1/tmp?user.name={0}&op=liststatus&filter=f*", user));
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
    BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
    reader.readLine();
    reader.close();
  }

  @Test
  @TestDir
  @TestJetty
  @TestHdfs
  public void testPutNoOperation() throws Exception {
    createHttpFSServer();
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    URL url = new URL(TestJettyHelper.getJettyURL(),
                      MessageFormat.format("/webhdfs/v1/foo?user.name={0}", user));
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setDoInput(true);
    conn.setDoOutput(true);
    conn.setRequestMethod("PUT");
    // a PUT without an op parameter must be rejected
    assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
  }

}

org/apache/hadoop/test/TestJettyHelper.java View File

@@ -1,118 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.test;

import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.URL;

import org.junit.Test;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;
import org.mortbay.jetty.Server;

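/**
 * JUnit {@link MethodRule} that provides an embedded Jetty server to test
 * methods annotated with {@link TestJetty}. The server is created before the
 * test runs, published through a thread local, and stopped afterwards.
 */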
public class TestJettyHelper implements MethodRule {

  @Test
  public void dummy() {
  }

  private static ThreadLocal<Server> TEST_SERVLET_TL = new InheritableThreadLocal<Server>();

  @Override
  public Statement apply(final Statement statement, final FrameworkMethod frameworkMethod, final Object o) {
    return new Statement() {
      @Override
      public void evaluate() throws Throwable {
        Server server = null;
        TestJetty testJetty = frameworkMethod.getAnnotation(TestJetty.class);
        if (testJetty != null) {
          server = createJettyServer();
        }
        try {
          TEST_SERVLET_TL.set(server);
          statement.evaluate();
        } finally {
          TEST_SERVLET_TL.remove();
          if (server != null && server.isRunning()) {
            try {
              server.stop();
            } catch (Exception ex) {
              throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
            }
          }
        }
      }
    };
  }

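  // Binds a ServerSocket to port 0 so the OS picks a free port, closes it, and
  // reuses that port for Jetty. Note there is a small window in which another
  // process could grab the port before Jetty binds it.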
  private Server createJettyServer() {
    try {
      String host = InetAddress.getLocalHost().getHostName();
      ServerSocket ss = new ServerSocket(0);
      int port = ss.getLocalPort();
      ss.close();
      Server server = new Server(0);
      server.getConnectors()[0].setHost(host);
      server.getConnectors()[0].setPort(port);
      return server;
    } catch (Exception ex) {
      throw new RuntimeException("Could not create embedded servlet container, " + ex.getMessage(), ex);
    }
  }

  /**
   * Returns a Jetty server ready to be configured and then started. This
   * server is only available when the test method has been annotated with
   * {@link TestJetty}. Refer to the {@link HTestCase} header for details.
   * <p/>
   * Once configured, the Jetty server should be started. The server will be
   * automatically stopped when the test method ends.
   *
   * @return a Jetty server ready to be configured and then started.
   */
  public static Server getJettyServer() {
    Server server = TEST_SERVLET_TL.get();
    if (server == null) {
      throw new IllegalStateException("This test does not use @TestJetty");
    }
    return server;
  }

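  // A minimal usage sketch (assuming a "webapp" resource on the test
  // classpath), mirroring how the HttpFS tests above use this helper:
  //
  //   URL url = Thread.currentThread().getContextClassLoader().getResource("webapp");
  //   Server server = TestJettyHelper.getJettyServer();
  //   server.addHandler(new WebAppContext(url.getPath(), "/webhdfs"));
  //   server.start();
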
  /**
   * Returns the base URL (SCHEMA://HOST:PORT) of the test Jetty server
   * (see {@link #getJettyServer()}) once started.
   *
   * @return the base URL (SCHEMA://HOST:PORT) of the test Jetty server.
   */
  public static URL getJettyURL() {
    Server server = TEST_SERVLET_TL.get();
    if (server == null) {
      throw new IllegalStateException("This test does not use @TestJetty");
    }
    try {
      return new URL("http://" + server.getConnectors()[0].getHost() + ":" + server.getConnectors()[0].getPort());
    } catch (MalformedURLException ex) {
      throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
    }
  }

}