HADOOP-6562. FileContextSymlinkBaseTest should use FileContextTestHelper. Contributed by Eli Collins
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1035162 13f79535-47bb-0310-9956-ffa450edef68
commit 7f138d0f74 (parent ee08b2c803)
@@ -177,6 +177,8 @@ Trunk (unreleased changes)
 
     HADOOP-7032. Assert type constraints in the FileStatus constructor. (eli)
 
+    HADOOP-6562. FileContextSymlinkBaseTest should use FileContextTestHelper. (eli)
+
   OPTIMIZATIONS
 
     HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).
@@ -19,8 +19,6 @@
 
 import java.io.*;
 import java.net.URI;
 import java.util.Iterator;
 import java.util.Random;
 import java.util.EnumSet;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Options.CreateOpts;
@@ -29,7 +27,6 @@
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FSDataInputStream;
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
 import static org.junit.Assert.*;
 
@@ -48,8 +45,8 @@ public abstract class FileContextSymlinkBaseTest {
   protected static FileContext fc;
 
   abstract protected String getScheme();
-  abstract protected String testBaseDir1();
-  abstract protected String testBaseDir2();
+  abstract protected String testBaseDir1() throws IOException;
+  abstract protected String testBaseDir2() throws IOException;
   abstract protected URI testURI();
 
   protected IOException unwrapException(IOException e) {
@@ -58,44 +55,27 @@ protected IOException unwrapException(IOException e) {
 
   protected static void createAndWriteFile(FileContext fc, Path p)
       throws IOException {
-    FSDataOutputStream out;
-    out = fc.create(p, EnumSet.of(CreateFlag.CREATE),
+    createFile(fc, p, fileSize / blockSize,
       CreateOpts.createParent(),
       CreateOpts.repFac((short) 1),
       CreateOpts.blockSize(blockSize));
-    byte[] buf = new byte[fileSize];
-    Random rand = new Random(seed);
-    rand.nextBytes(buf);
-    out.write(buf);
-    out.close();
   }
 
   protected static void createAndWriteFile(Path p) throws IOException {
     createAndWriteFile(fc, p);
   }
 
-  protected void readFile(Path p) throws IOException {
-    FSDataInputStream out = fc.open(p);
-    byte[] actual = new byte[fileSize];
-    out.readFully(actual);
-    out.close();
+  protected static void readFile(Path p) throws IOException {
+    FileContextTestHelper.readFile(fc, p, fileSize);
   }
 
-  protected void readFile(FileContext fc, Path p) throws IOException {
-    FSDataInputStream out = fc.open(p);
-    byte[] actual = new byte[fileSize];
-    out.readFully(actual);
-    out.close();
+  protected static void readFile(FileContext fc, Path p) throws IOException {
+    FileContextTestHelper.readFile(fc, p, fileSize);
   }
 
-  protected void appendToFile(Path p) throws IOException {
-    FSDataOutputStream out;
-    out = fc.create(p, EnumSet.of(CreateFlag.APPEND));
-    byte[] buf = new byte[fileSize];
-    Random rand = new Random(seed);
-    rand.nextBytes(buf);
-    out.write(buf);
-    out.close();
+  protected static void appendToFile(Path p) throws IOException {
+    FileContextTestHelper.appendToFile(fc, p, fileSize / blockSize,
+      CreateOpts.blockSize(blockSize));
   }
 
   @Before
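For orientation, the base-test helpers above now just delegate to FileContextTestHelper. Below is a minimal standalone sketch, not part of this commit, that exercises the same helper calls against the local FileContext; the class name, test path, and block size are illustrative assumptions.

import java.io.IOException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;

// Hypothetical driver class, not part of the commit.
public class HelperUsageSketch {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getLocalFSFileContext();
    // getTestRootPath() resolves the name under the helper's test root dir.
    Path file = FileContextTestHelper.getTestRootPath(fc, "sketch/file");

    // Write a single 1024-byte block, the same pattern the refactored
    // createAndWriteFile() uses via the statically imported createFile().
    final int blockSize = 1024;
    FileContextTestHelper.createFile(fc, file, 1,
        CreateOpts.createParent(),
        CreateOpts.repFac((short) 1),
        CreateOpts.blockSize(blockSize));

    // Read it back through the helper, as the refactored readFile() does.
    byte[] data = FileContextTestHelper.readFile(fc, file, blockSize);
    System.out.println("read " + data.length + " bytes, exists="
        + FileContextTestHelper.exists(fc, file));

    fc.delete(file, false);
  }
}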
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.fs;
 
-
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.FileNotFoundException;
@@ -127,11 +126,25 @@ public static void createFileNonRecursive(FileContext fc, String name)
     Path path = getTestRootPath(fc, name);
     createFileNonRecursive(fc, path);
   }
 
   public static void createFileNonRecursive(FileContext fc, Path path)
       throws IOException {
     createFile(fc, path, DEFAULT_NUM_BLOCKS, CreateOpts.donotCreateParent());
   }
+
+  public static void appendToFile(FileContext fc, Path path, int numBlocks,
+      CreateOpts... options) throws IOException {
+    BlockSize blockSizeOpt =
+      (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
+      : DEFAULT_BLOCK_SIZE;
+    FSDataOutputStream out;
+    out = fc.create(path, EnumSet.of(CreateFlag.APPEND));
+    byte[] data = getFileData(numBlocks, blockSize);
+    out.write(data, 0, data.length);
+    out.close();
+  }
+
   public static boolean exists(FileContext fc, Path p) throws IOException {
     return fc.util().exists(p);
   }
@@ -161,7 +174,7 @@ public static boolean isSymlink(FileContext fc, Path p) throws IOException {
   }
 
   public static void writeFile(FileContext fc, Path path, byte b[])
-      throws Exception {
+      throws IOException {
     FSDataOutputStream out =
       fc.create(path,EnumSet.of(CreateFlag.CREATE), CreateOpts.createParent());
     out.write(b);
@@ -169,13 +182,14 @@ public static void writeFile(FileContext fc, Path path, byte b[])
   }
 
   public static byte[] readFile(FileContext fc, Path path, int len)
-      throws Exception {
+      throws IOException {
     DataInputStream dis = fc.open(path);
     byte[] buffer = new byte[len];
     IOUtils.readFully(dis, buffer, 0, len);
     dis.close();
     return buffer;
   }
 
   public static FileStatus containsPath(FileContext fc, Path path,
       FileStatus[] dirList)
       throws IOException {
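The helper changes above narrow writeFile()/readFile() from Exception to IOException and add an appendToFile() helper. A small round-trip sketch, not part of this commit, using only helpers that appear in this diff; the class name and path are hypothetical.

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.Path;

// Hypothetical round-trip check, not part of the commit.
public class WriteReadSketch {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getLocalFSFileContext();
    Path file = FileContextTestHelper.getTestRootPath(fc, "sketch/roundtrip");

    byte[] expected = "hello, helper".getBytes("UTF-8");
    // writeFile() creates the file (and parent dirs) and writes the bytes.
    FileContextTestHelper.writeFile(fc, file, expected);

    // readFile() reads back exactly expected.length bytes.
    byte[] actual = FileContextTestHelper.readFile(fc, file, expected.length);
    if (!Arrays.equals(expected, actual)) {
      throw new IOException("round trip mismatch");
    }
    fc.delete(file, false);
  }
}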
@@ -25,6 +25,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
+import static org.apache.hadoop.fs.FileContextTestHelper.*;
 import static org.junit.Assert.*;
 import org.junit.Test;
 import org.junit.Before;
@@ -38,12 +39,12 @@ protected String getScheme() {
     return "file";
   }
 
-  protected String testBaseDir1() {
-    return "/tmp/test1";
+  protected String testBaseDir1() throws IOException {
+    return getAbsoluteTestRootDir(fc)+"/test1";
   }
 
-  protected String testBaseDir2() {
-    return "/tmp/test2";
+  protected String testBaseDir2() throws IOException {
+    return getAbsoluteTestRootDir(fc)+"/test2";
   }
 
   protected URI testURI() {
@@ -158,7 +159,7 @@ public void testGetLinkStatusPartQualTarget() throws IOException {
     // RawLocalFs only maintains the path part, not the URI, and
     // therefore does not support links to other file systems.
     Path anotherFs = new Path("hdfs://host:1000/dir/file");
-    FileUtil.fullyDelete(new File("/tmp/test2/linkToFile"));
+    FileUtil.fullyDelete(new File(linkNew.toString()));
     try {
       fc.createSymlink(anotherFs, linkNew, false);
       fail("Created a local fs link to a non-local fs");
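The last hunk replaces the hard-coded /tmp path with the link's own path. For context, a minimal sketch, not part of this commit, of the local-fs symlink round trip these tests exercise; it assumes a Unix-like host where the local FileContext supports createSymlink, and the class and path names are illustrative.

import java.io.IOException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Path;

// Hypothetical symlink round trip, not part of the commit.
public class SymlinkSketch {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getLocalFSFileContext();
    Path target = FileContextTestHelper.getTestRootPath(fc, "sketch/target");
    Path link = FileContextTestHelper.getTestRootPath(fc, "sketch/link");

    // Create a small target file, then a symlink pointing at it.
    FileContextTestHelper.createFile(fc, target, 1, CreateOpts.createParent());
    fc.createSymlink(target, link, false);

    // The helper's isSymlink() reports whether the path is a link.
    System.out.println("isSymlink=" + FileContextTestHelper.isSymlink(fc, link));

    fc.delete(link, false);
    fc.delete(target, false);
  }
}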