HADOOP-10426. Declare CreateOpts.getOpt(..) with generic type argument, removes unused FileContext.getFileStatus(..) and fixes various javac warnings.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1581437 13f79535-47bb-0310-9956-ffa450edef68
parent 867e0f8ea9, commit 274d9b2ac4
@@ -321,6 +321,10 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10418. SaslRpcClient should not assume that remote principals are in
     the default_realm. (atm)
 
+    HADOOP-10426. Declare CreateOpts.getOpt(..) with generic type argument,
+    removes unused FileContext.getFileStatus(..) and fixes various javac
+    warnings. (szetszwo)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -750,13 +750,12 @@ public void createSymlink(final Path target, final Path link,
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @throws IOException subclass implementations may throw IOException
    */
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an
-     * UnresolvedLinkException, causing this function to be called,
-     * needs to override this method.
-     */
-    throw new AssertionError();
+    throw new AssertionError("Implementation Error: " + getClass()
+        + " that threw an UnresolvedLinkException, causing this method to be"
+        + " called, needs to override this method.");
   }
 
   /**
@@ -33,7 +33,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable, Closeable,
+    implements Seekable, PositionedReadable,
       ByteBufferReadable, HasFileDescriptor, CanSetDropBehind, CanSetReadahead,
       HasEnhancedByteBufferAccess {
   /**
@@ -44,8 +44,7 @@ public class FSDataInputStream extends DataInputStream
   extendedReadBuffers
       = new IdentityHashStore<ByteBuffer, ByteBufferPool>(0);
 
-  public FSDataInputStream(InputStream in)
-      throws IOException {
+  public FSDataInputStream(InputStream in) {
     super(in);
     if( !(in instanceof Seekable) || !(in instanceof PositionedReadable) ) {
       throw new IllegalArgumentException(
@@ -652,8 +652,7 @@ public FSDataOutputStream create(final Path f,
     // If not, add a default Perms and apply umask;
     // AbstractFileSystem#create
 
-    CreateOpts.Perms permOpt =
-        (CreateOpts.Perms) CreateOpts.getOpt(CreateOpts.Perms.class, opts);
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(CreateOpts.Perms.class, opts);
     FsPermission permission = (permOpt != null) ? permOpt.getValue() :
                                       FILE_DEFAULT_PERM;
     permission = permission.applyUMask(umask);
@@ -1520,40 +1519,6 @@ public boolean exists(final Path f) throws AccessControlException,
       }
     }
 
-    /**
-     * Return a list of file status objects that corresponds to supplied paths
-     * excluding those non-existent paths.
-     *
-     * @param paths list of paths we want information from
-     *
-     * @return a list of FileStatus objects
-     *
-     * @throws AccessControlException If access is denied
-     * @throws IOException If an I/O error occurred
-     *
-     * Exceptions applicable to file systems accessed over RPC:
-     * @throws RpcClientException If an exception occurred in the RPC client
-     * @throws RpcServerException If an exception occurred in the RPC server
-     * @throws UnexpectedServerException If server implementation throws
-     *           undeclared exception to RPC server
-     */
-    private FileStatus[] getFileStatus(Path[] paths)
-        throws AccessControlException, IOException {
-      if (paths == null) {
-        return null;
-      }
-      ArrayList<FileStatus> results = new ArrayList<FileStatus>(paths.length);
-      for (int i = 0; i < paths.length; i++) {
-        try {
-          results.add(FileContext.this.getFileStatus(paths[i]));
-        } catch (FileNotFoundException fnfe) {
-          // ignoring
-        }
-      }
-      return results.toArray(new FileStatus[results.size()]);
-    }
-
-
     /**
      * Return the {@link ContentSummary} of path f.
      * @param f path
@@ -53,8 +53,7 @@ protected AbstractFileSystem getMyFs() {
     return myFs;
   }
 
-  protected FilterFs(AbstractFileSystem fs) throws IOException,
-      URISyntaxException {
+  protected FilterFs(AbstractFileSystem fs) throws URISyntaxException {
     super(fs.getUri(), fs.getUri().getScheme(),
         fs.getUri().getAuthority() != null, fs.getUriDefaultPort());
     myFs = fs;
@@ -150,21 +150,25 @@ protected CreateParent(boolean createPar) {
 
     /**
      * Get an option of desired type
-     * @param theClass is the desired class of the opt
+     * @param clazz is the desired class of the opt
      * @param opts - not null - at least one opt must be passed
      * @return an opt from one of the opts of type theClass.
      *   returns null if there isn't any
      */
-    protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass, CreateOpts ...opts) {
+    static <T extends CreateOpts> T getOpt(Class<T> clazz, CreateOpts... opts) {
       if (opts == null) {
         throw new IllegalArgumentException("Null opt");
       }
-      CreateOpts result = null;
+      T result = null;
       for (int i = 0; i < opts.length; ++i) {
-        if (opts[i].getClass() == theClass) {
-          if (result != null)
-            throw new IllegalArgumentException("multiple blocksize varargs");
-          result = opts[i];
+        if (opts[i].getClass() == clazz) {
+          if (result != null) {
+            throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+          }
+
+          @SuppressWarnings("unchecked")
+          T t = (T)opts[i];
+          result = t;
         }
       }
       return result;
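The point of the new generic signature is visible at call sites: the Class<T> argument now determines the return type, so the raw-CreateOpts cast disappears. A minimal before/after sketch (illustrative only, not part of the patch; DEFAULT_BLOCK_SIZE stands in for whatever fallback the caller uses):

    // Before HADOOP-10426: getOpt returned the raw CreateOpts supertype, forcing a cast.
    CreateOpts.BlockSize blockSizeOpt =
        (CreateOpts.BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, opts);

    // After: T is inferred from the Class<T> argument, so the result is already typed.
    CreateOpts.BlockSize typed = CreateOpts.getOpt(CreateOpts.BlockSize.class, opts);
    long blockSize = (typed != null) ? typed.getValue() : DEFAULT_BLOCK_SIZE;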
@@ -175,14 +179,16 @@ protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass, Create
      * @param opts - the option is set into this array of opts
      * @return updated CreateOpts[] == opts + newValue
      */
-    protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
-        CreateOpts ...opts) {
+    static <T extends CreateOpts> CreateOpts[] setOpt(final T newValue,
+        final CreateOpts... opts) {
+      final Class<?> clazz = newValue.getClass();
       boolean alreadyInOpts = false;
       if (opts != null) {
         for (int i = 0; i < opts.length; ++i) {
-          if (opts[i].getClass() == newValue.getClass()) {
-            if (alreadyInOpts)
-              throw new IllegalArgumentException("multiple opts varargs");
+          if (opts[i].getClass() == clazz) {
+            if (alreadyInOpts) {
+              throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+            }
             alreadyInOpts = true;
             opts[i] = newValue;
           }
@@ -190,9 +196,12 @@ protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
       }
       CreateOpts[] resultOpt = opts;
       if (!alreadyInOpts) { // no newValue in opt
-        CreateOpts[] newOpts = new CreateOpts[opts.length + 1];
-        System.arraycopy(opts, 0, newOpts, 0, opts.length);
-        newOpts[opts.length] = newValue;
+        final int oldLength = opts == null? 0: opts.length;
+        CreateOpts[] newOpts = new CreateOpts[oldLength + 1];
+        if (oldLength > 0) {
+          System.arraycopy(opts, 0, newOpts, 0, oldLength);
+        }
+        newOpts[oldLength] = newValue;
         resultOpt = newOpts;
       }
       return resultOpt;
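The oldLength guard above also changes what setOpt tolerates: a null (or empty) varargs array now yields a one-element result instead of the NullPointerException the old opts.length access would have thrown. A short sketch of both paths, assuming package-private access to setOpt and the existing CreateOpts.blockSize factory:

    // No existing opts: the new value becomes the only element.
    CreateOpts[] fromNull =
        CreateOpts.setOpt(CreateOpts.blockSize(1024), (CreateOpts[]) null);

    // An option of the same class is already present: it is replaced in place,
    // so the returned array has the same length as the input.
    CreateOpts[] merged = CreateOpts.setOpt(CreateOpts.blockSize(2048), fromNull);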
@@ -273,50 +282,29 @@ public static ChecksumOpt createDisabled() {
    */
   public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt,
       ChecksumOpt userOpt, int userBytesPerChecksum) {
-    // The following is done to avoid unnecessary creation of new objects.
-    // tri-state variable: 0 default, 1 userBytesPerChecksum, 2 userOpt
-    short whichSize;
-    // true default, false userOpt
-    boolean useDefaultType;
-
+    final boolean useDefaultType;
+    final DataChecksum.Type type;
+    if (userOpt != null
+        && userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
+      useDefaultType = false;
+      type = userOpt.getChecksumType();
+    } else {
+      useDefaultType = true;
+      type = defaultOpt.getChecksumType();
+    }
+
     // bytesPerChecksum - order of preference
     //   user specified value in bytesPerChecksum
     //   user specified value in checksumOpt
     //   default.
     if (userBytesPerChecksum > 0) {
-      whichSize = 1; // userBytesPerChecksum
-    } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
-      whichSize = 2; // userOpt
-    } else {
-      whichSize = 0; // default
-    }
-
-    // checksum type - order of preference
-    //   user specified value in checksumOpt
-    //   default.
-    if (userOpt != null &&
-        userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
-      useDefaultType = false;
-    } else {
-      useDefaultType = true;
-    }
-
-    // Short out the common and easy cases
-    if (whichSize == 0 && useDefaultType) {
-      return defaultOpt;
-    } else if (whichSize == 2 && !useDefaultType) {
-      return userOpt;
-    }
-
-    // Take care of the rest of combinations
-    DataChecksum.Type type = useDefaultType ? defaultOpt.getChecksumType() :
-        userOpt.getChecksumType();
-    if (whichSize == 0) {
-      return new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
-    } else if (whichSize == 1) {
       return new ChecksumOpt(type, userBytesPerChecksum);
+    } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
+      return !useDefaultType? userOpt
+          : new ChecksumOpt(type, userOpt.getBytesPerChecksum());
     } else {
-      return new ChecksumOpt(type, userOpt.getBytesPerChecksum());
+      return useDefaultType? defaultOpt
+          : new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
     }
   }
 
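The rewritten method keeps the precedence the comments describe: an explicit userBytesPerChecksum argument wins over the size carried in userOpt, which wins over defaultOpt, while the checksum type falls back to defaultOpt whenever userOpt is absent or requests Type.DEFAULT. A hedged sketch of two resolutions (values invented for illustration):

    ChecksumOpt dflt = new ChecksumOpt(DataChecksum.Type.CRC32C, 512);
    ChecksumOpt user = new ChecksumOpt(DataChecksum.Type.DEFAULT, 256);

    // Size taken from userOpt, type falls back to the default type:
    // resolves to (CRC32C, 256).
    ChecksumOpt a = ChecksumOpt.processChecksumOpt(dflt, user, -1);

    // An explicit bytes-per-checksum argument overrides both opts:
    // resolves to (CRC32C, 1024).
    ChecksumOpt b = ChecksumOpt.processChecksumOpt(dflt, user, 1024);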
@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -35,7 +34,7 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 
-public class CommandFactory extends Configured implements Configurable {
+public class CommandFactory extends Configured {
   private Map<String, Class<? extends Command>> classMap =
     new HashMap<String, Class<? extends Command>>();
 
@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.fs.shell;
 
-import java.io.*;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copy files */
@@ -18,12 +18,12 @@
 package org.apache.hadoop.fs.shell;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericDatumReader;
@@ -31,7 +31,6 @@
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -253,7 +252,7 @@ protected static class AvroFileInputStream extends InputStream {
     private int pos;
     private byte[] buffer;
     private ByteArrayOutputStream output;
-    private FileReader fileReader;
+    private FileReader<?> fileReader;
     private DatumWriter<Object> writer;
     private JsonEncoder encoder;
 
@@ -570,7 +570,7 @@ private static URI stringToUri(String pathString) throws IOException {
 
   @Override
   public int compareTo(PathData o) {
-    return path.compareTo(((PathData)o).path);
+    return path.compareTo(o.path);
   }
 
   @Override
@@ -1091,12 +1091,7 @@ private static boolean isSessionExpired(Code code) {
   }
 
   private static boolean shouldRetry(Code code) {
-    switch (code) {
-      case CONNECTIONLOSS:
-      case OPERATIONTIMEOUT:
-        return true;
-    }
-    return false;
+    return code == Code.CONNECTIONLOSS || code == Code.OPERATIONTIMEOUT;
   }
 
   @Override
@@ -19,8 +19,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Singleton to get access to Http related configuration.
@@ -434,7 +434,7 @@ private static void addNoCacheFilter(WebAppContext ctxt) {
    * provided. This wrapper and all subclasses must create at least one
    * listener.
    */
-  public Connector createBaseListener(Configuration conf) throws IOException {
+  public Connector createBaseListener(Configuration conf) {
     return HttpServer2.createDefaultChannelConnector();
   }
 
@@ -527,8 +527,7 @@ protected void addDefaultServlets() {
     addServlet("conf", "/conf", ConfServlet.class);
   }
 
-  public void addContext(Context ctxt, boolean isFiltered)
-      throws IOException {
+  public void addContext(Context ctxt, boolean isFiltered) {
     webServer.addHandler(ctxt);
     addNoCacheFilter(webAppContext);
     defaultContexts.put(ctxt, isFiltered);
@@ -48,11 +48,11 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
 
   /* Class to id mappings */
   @VisibleForTesting
-  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  Map<Class<?>, Byte> classToIdMap = new ConcurrentHashMap<Class<?>, Byte>();
 
   /* Id to Class mappings */
   @VisibleForTesting
-  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  Map<Byte, Class<?>> idToClassMap = new ConcurrentHashMap<Byte, Class<?>>();
 
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;
@@ -65,7 +65,7 @@ byte getNewClasses() {
   /**
    * Used to add "predefined" classes and by Writable to copy "new" classes.
    */
-  private synchronized void addToMap(Class clazz, byte id) {
+  private synchronized void addToMap(Class<?> clazz, byte id) {
     if (classToIdMap.containsKey(clazz)) {
       byte b = classToIdMap.get(clazz);
       if (b != id) {
@@ -74,7 +74,7 @@ private synchronized void addToMap(Class clazz, byte id) {
       }
     }
     if (idToClassMap.containsKey(id)) {
-      Class c = idToClassMap.get(id);
+      Class<?> c = idToClassMap.get(id);
       if (!c.equals(clazz)) {
         throw new IllegalArgumentException("Id " + id + " exists but maps to " +
             c.getName() + " and not " + clazz.getName());
@@ -85,7 +85,7 @@ private synchronized void addToMap(Class clazz, byte id) {
   }
 
   /** Add a Class to the maps if it is not already present. */
-  protected synchronized void addToMap(Class clazz) {
+  protected synchronized void addToMap(Class<?> clazz) {
     if (classToIdMap.containsKey(clazz)) {
       return;
     }
@@ -98,12 +98,12 @@ protected synchronized void addToMap(Class clazz) {
   }
 
   /** @return the Class class for the specified id */
-  protected Class getClass(byte id) {
+  protected Class<?> getClass(byte id) {
     return idToClassMap.get(id);
   }
 
   /** @return the id for the specified Class */
-  protected byte getId(Class clazz) {
+  protected byte getId(Class<?> clazz) {
     return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
   }
 
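The Class-to-Class<?> changes in this file do not alter behaviour; they only remove javac's raw-type warnings. A standalone illustration of the warning being silenced (not Hadoop code, just javac -Xlint:rawtypes behaviour):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class RawTypeDemo {
      // warning: [rawtypes] found raw type: Class
      Map<Class, Byte> raw = new ConcurrentHashMap<Class, Byte>();

      // Wildcard-parameterized: no rawtypes warning, identical runtime behaviour.
      Map<Class<?>, Byte> typed = new ConcurrentHashMap<Class<?>, Byte>();
    }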
@@ -82,7 +82,7 @@ public boolean equals(Object obj) {
     }
 
     if (obj instanceof MapWritable) {
-      Map map = (Map) obj;
+      MapWritable map = (MapWritable) obj;
       if (size() != map.size()) {
         return false;
       }
@@ -114,7 +114,6 @@ public Set<Writable> keySet() {
   }
 
   @Override
-  @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
@@ -163,7 +162,6 @@ public void write(DataOutput out) throws IOException {
     }
   }
 
-  @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);
@@ -77,8 +77,7 @@ public Path getTestRootPath(FileContext fc, String pathString) {
   // the getAbsolutexxx method is needed because the root test dir
   // can be messed up by changing the working dir.
 
-  public String getAbsoluteTestRootDir(FileContext fc)
-      throws IOException {
+  public String getAbsoluteTestRootDir(FileContext fc) {
     if (absTestRootDir == null) {
       if (new Path(testRootDir).isAbsolute()) {
         absTestRootDir = testRootDir;
@@ -90,12 +89,11 @@ public String getAbsoluteTestRootDir(FileContext fc)
     return absTestRootDir;
   }
 
-  public Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+  public Path getAbsoluteTestRootPath(FileContext fc) {
     return fc.makeQualified(new Path(getAbsoluteTestRootDir(fc)));
   }
 
-  public Path getDefaultWorkingDirectory(FileContext fc)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileContext fc) {
     return getTestRootPath(fc, "/user/" + System.getProperty("user.name"))
         .makeQualified(fc.getDefaultFileSystem().getUri(),
             fc.getWorkingDirectory());
@@ -106,8 +104,7 @@ public Path getDefaultWorkingDirectory(FileContext fc)
    */
   public static long createFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -146,8 +143,7 @@ public static long createFileNonRecursive(FileContext fc, Path path)
 
   public static void appendToFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -203,14 +199,11 @@ public static byte[] readFile(FileContext fc, Path path, int len)
   }
 
   public FileStatus containsPath(FileContext fc, Path path,
-      FileStatus[] dirList)
-      throws IOException {
+      FileStatus[] dirList) {
     return containsPath(getTestRootPath(fc, path.toString()), dirList);
   }
 
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
-      throws IOException {
+  public static FileStatus containsPath(Path path, FileStatus[] dirList) {
     for(int i = 0; i < dirList.length; i ++) {
       if (path.equals(dirList[i].getPath()))
         return dirList[i];
@@ -219,8 +212,7 @@ public static FileStatus containsPath(Path path,
   }
 
   public FileStatus containsPath(FileContext fc, String path,
-      FileStatus[] dirList)
-      throws IOException {
+      FileStatus[] dirList) {
     return containsPath(fc, new Path(path), dirList);
   }
 
@@ -62,8 +62,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -100,8 +99,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -63,8 +63,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
    long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -101,8 +100,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -261,7 +259,7 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
     // Need to translate the FileContext-style options into FileSystem-style
 
     // Permissions with umask
-    CreateOpts.Perms permOpt = (CreateOpts.Perms) CreateOpts.getOpt(
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(
         CreateOpts.Perms.class, opts);
     FsPermission umask = FsPermission.getUMask(fs.getConf());
     FsPermission permission = (permOpt != null) ? permOpt.getValue()
@@ -273,23 +271,22 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
     int bufferSize = fs.getConf().getInt(
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
-    CreateOpts.BufferSize bufOpt = (CreateOpts.BufferSize) CreateOpts.getOpt(
+    CreateOpts.BufferSize bufOpt = CreateOpts.getOpt(
         CreateOpts.BufferSize.class, opts);
     bufferSize = (bufOpt != null) ? bufOpt.getValue() : bufferSize;
     // replication
     short replication = fs.getDefaultReplication(f);
     CreateOpts.ReplicationFactor repOpt =
-        (CreateOpts.ReplicationFactor) CreateOpts.getOpt(
-            CreateOpts.ReplicationFactor.class, opts);
+        CreateOpts.getOpt(CreateOpts.ReplicationFactor.class, opts);
     replication = (repOpt != null) ? repOpt.getValue() : replication;
     // blockSize
     long blockSize = fs.getDefaultBlockSize(f);
-    CreateOpts.BlockSize blockOpt = (CreateOpts.BlockSize) CreateOpts.getOpt(
+    CreateOpts.BlockSize blockOpt = CreateOpts.getOpt(
         CreateOpts.BlockSize.class, opts);
     blockSize = (blockOpt != null) ? blockOpt.getValue() : blockSize;
     // Progressable
     Progressable progress = null;
-    CreateOpts.Progress progressOpt = (CreateOpts.Progress) CreateOpts.getOpt(
+    CreateOpts.Progress progressOpt = CreateOpts.getOpt(
         CreateOpts.Progress.class, opts);
     progress = (progressOpt != null) ? progressOpt.getValue() : progress;
     return fs.create(f, permission, overwrite, bufferSize, replication,