diff --git a/CHANGES.txt b/CHANGES.txt index ee00e5f153..b0637282d9 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -11,9 +11,7 @@ Trunk (unreleased changes) OPTIMIZATIONS BUG FIXES - HADOOP-6274. Fix TestLocalFSFileContextMainOperations test failure. (Gary Murry via suresh). - Release 0.21.0 - Unreleased INCOMPATIBLE CHANGES @@ -618,6 +616,9 @@ Release 0.21.0 - Unreleased HADOOP-6271. Add recursive and non recursive create and mkdir to FileContext. (Sanjay Radia via suresh) + HADOOP-6261. Add URI based tests for FileContext. + (Ravi Pulari via suresh). + BUG FIXES HADOOP-5379. CBZip2InputStream to throw IOException on data crc error. @@ -1075,6 +1076,9 @@ Release 0.21.0 - Unreleased html characters in the parameters, to prevent cross site scripting attacks. (omalley) + HADOOP-6274. Fix TestLocalFSFileContextMainOperations test failure. + (Gary Murry via suresh). + Release 0.20.1 - 2009-09-01 INCOMPATIBLE CHANGES diff --git a/src/test/core/org/apache/hadoop/fs/FileContextURIBase.java b/src/test/core/org/apache/hadoop/fs/FileContextURIBase.java new file mode 100644 index 0000000000..d72f1ef320 --- /dev/null +++ b/src/test/core/org/apache/hadoop/fs/FileContextURIBase.java @@ -0,0 +1,535 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs; + +import java.io.*; +import java.util.ArrayList; +import java.util.EnumSet; + +import junit.framework.Assert; + +import org.apache.hadoop.fs.permission.FsPermission; +import org.junit.After; +import org.junit.Test; + +/** + *

+ * A collection of tests for the {@link FileContext} to test path names passed + * as URIs. This test should be used for testing an instance of FileContext that + * has been initialized to a specific default FileSystem such as a LocalFileSystem, + * HDFS, S3, etc., and where path names are passed that are URIs in a different + * FileSystem. + *

+ * + *

+ * To test a given {@link FileSystem} implementation, create a subclass of this + * test and override {@link #setUp()} to initialize the fc1 and + * fc2 + * + * The tests will do operations on fc1 that use a URI in fc2 + * + * {@link FileContext} instance variable. + *

+ */ +public abstract class FileContextURIBase { + private static final String basePath = System.getProperty("test.build.data", + "build/test/data") + "/testContextURI"; + private static final Path BASE = new Path(basePath); + protected FileContext fc1; + protected FileContext fc2; + + private static int BLOCK_SIZE = 1024; + + private static byte[] data = new byte[BLOCK_SIZE * 2]; // two blocks of data + { + for (int i = 0; i < data.length; i++) { + data[i] = (byte) (i % 10); + } + } + + //Helper method to make path qualified + protected Path qualifiedPath(String path, FileContext fc) { + return fc.makeQualified(new Path(BASE, path)); + } + + // Helper method to create file and write data to file + protected void createFile(Path path, FileContext fc) throws IOException { + FSDataOutputStream out = fc.create(path, EnumSet.of(CreateFlag.CREATE), + Options.CreateOpts.createParent()); + out.write(data, 0, data.length); + out.close(); + } + + @After + public void tearDown() throws Exception { + // Clean up after test completion + // No need to clean fc1 as fc1 and fc2 points same location + fc2.delete(BASE, true); + } + + @Test + public void testCreateFile() throws IOException { + String fileNames[] = { + "testFile", "test File", + "test*File", "test#File", + "test1234", "1234Test", + "test)File", "test_File", + "()&^%$#@!~_+}{> 0); + Assert.assertTrue(fc2Status.getRemaining() > 0); + Assert.assertTrue(fc2Status.getUsed() > 0); + + } + + @Test + public void testGetFileStatusThrowsExceptionForNonExistentFile() + throws Exception { + String testFile = "test/hadoop/fileDoesNotExist"; + Path testPath = qualifiedPath(testFile, fc2); + try { + fc1.getFileStatus(testPath); + Assert.fail("Should throw FileNotFoundException"); + } catch (FileNotFoundException e) { + // expected + } + } + + @Test + public void testListStatusThrowsExceptionForNonExistentFile() + throws Exception { + String testFile = "test/hadoop/file"; + Path testPath = qualifiedPath(testFile, fc2); + try { + 
fc1.listStatus(testPath); + Assert.fail("Should throw FileNotFoundException"); + } catch (FileNotFoundException fnfe) { + // expected + } + } + + + @Test + public void testListStatus() throws Exception { + final String hPrefix = "test/hadoop"; + final String[] dirs = { + hPrefix + "/a", + hPrefix + "/b", + hPrefix + "/c", + hPrefix + "/1", + hPrefix + "/#@#@", + hPrefix + "/&*#$#$@234"}; + ArrayList testDirs = new ArrayList(); + + for (String d : dirs) { + testDirs.add(qualifiedPath(d, fc2)); + } + Assert.assertFalse(fc1.exists(testDirs.get(0))); + + for (Path path : testDirs) { + fc1.mkdir(path, FsPermission.getDefault(), true); + } + + FileStatus[] paths = fc1.listStatus(qualifiedPath("test", fc1)); + Assert.assertEquals(1, paths.length); + Assert.assertEquals(qualifiedPath(hPrefix, fc1), paths[0].getPath()); + + paths = fc1.listStatus(qualifiedPath(hPrefix, fc1)); + Assert.assertEquals(6, paths.length); + for (int i = 0; i < dirs.length; i++) { + boolean found = false; + for (int j = 0; j < paths.length; j++) { + if (qualifiedPath(dirs[i],fc1).equals(paths[j].getPath())) { + found = true; + } + } + Assert.assertTrue(dirs[i] + " not found", found); + } + + paths = fc1.listStatus(qualifiedPath(dirs[0], fc1)); + Assert.assertEquals(0, paths.length); + } +} diff --git a/src/test/core/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java b/src/test/core/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java new file mode 100644 index 0000000000..2dac557c71 --- /dev/null +++ b/src/test/core/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs; + + +import org.apache.hadoop.conf.Configuration; +import org.junit.Before; + +public class TestLocal_S3FileContextURI extends FileContextURIBase { + + @Before + public void setUp() throws Exception { + Configuration S3Conf = new Configuration(); + Configuration localConf = new Configuration(); + + S3Conf.set(FsConfig.FS_DEFAULT_NAME, S3Conf.get("test.fs.s3.name")); + fc1 = FileContext.getFileContext(S3Conf); + fc2 = FileContext.getFileContext(localConf); + } + +} diff --git a/src/test/core/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java b/src/test/core/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java new file mode 100644 index 0000000000..16c9e41702 --- /dev/null +++ b/src/test/core/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs; + + +import org.apache.hadoop.conf.Configuration; +import org.junit.Before; + +public class TestS3_LocalFileContextURI extends FileContextURIBase { + + @Before + public void setUp() throws Exception { + + Configuration localConf = new Configuration(); + fc2 = FileContext.getFileContext(localConf); + + Configuration s3conf = new Configuration(); + s3conf.set(FsConfig.FS_DEFAULT_NAME, s3conf.get("test.fs.s3.name")); + fc1 = FileContext.getFileContext(s3conf); + + } +}