diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index c907938896..dd6f68a6d7 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -321,6 +321,10 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10418. SaslRpcClient should not assume that remote principals are in
     the default_realm. (atm)
 
+    HADOOP-10426. Declare CreateOpts.getOpt(..) with generic type argument,
+    removes unused FileContext.getFileStatus(..) and fixes various javac
+    warnings. (szetszwo)
+
 Release 2.4.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index 6942758dd9..d0303be71a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -750,13 +750,12 @@ public void createSymlink(final Path target, final Path link,
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @throws IOException subclass implementations may throw IOException
    */
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an
-     * UnresolvedLinkException, causing this function to be called,
-     * needs to override this method.
-     */
-    throw new AssertionError();
+    throw new AssertionError("Implementation Error: " + getClass()
+        + " that threw an UnresolvedLinkException, causing this method to be"
+        + " called, needs to override this method.");
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
index a77ca43772..c8609d450f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
@@ -33,7 +33,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable, Closeable,
+    implements Seekable, PositionedReadable,
       ByteBufferReadable, HasFileDescriptor, CanSetDropBehind, CanSetReadahead,
       HasEnhancedByteBufferAccess {
   /**
@@ -44,8 +44,7 @@ public class FSDataInputStream extends DataInputStream
     extendedReadBuffers
       = new IdentityHashStore<ByteBuffer, ByteBufferPool>(0);
 
-  public FSDataInputStream(InputStream in)
-      throws IOException {
+  public FSDataInputStream(InputStream in) {
     super(in);
     if( !(in instanceof Seekable) || !(in instanceof PositionedReadable) ) {
       throw new IllegalArgumentException(
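
Illustrative usage sketch (not part of the patch; the class name below is made
up): the FSDataInputStream constructor still rejects streams that are not both
Seekable and PositionedReadable with an IllegalArgumentException, as the hunk
above shows. Only the never-thrown IOException declaration was removed, so
callers no longer need a try/catch around construction.

    import java.io.ByteArrayInputStream;

    import org.apache.hadoop.fs.FSDataInputStream;

    public class WrapCheckSketch {
      public static void main(String[] args) {
        try {
          // A plain ByteArrayInputStream is neither Seekable nor
          // PositionedReadable, so construction fails fast.
          new FSDataInputStream(new ByteArrayInputStream(new byte[8]));
        } catch (IllegalArgumentException expected) {
          // No IOException handling needed anymore.
          System.out.println("rejected: " + expected.getMessage());
        }
      }
    }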
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 90ccc81150..e4619cd1ce 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -652,8 +652,7 @@ public FSDataOutputStream create(final Path f,
     // If not, add a default Perms and apply umask;
     // AbstractFileSystem#create
 
-    CreateOpts.Perms permOpt =
-        (CreateOpts.Perms) CreateOpts.getOpt(CreateOpts.Perms.class, opts);
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(CreateOpts.Perms.class, opts);
     FsPermission permission = (permOpt != null) ? permOpt.getValue()
                                       : FILE_DEFAULT_PERM;
     permission = permission.applyUMask(umask);
@@ -1520,40 +1519,6 @@ public boolean exists(final Path f) throws AccessControlException,
       }
     }
 
-    /**
-     * Return a list of file status objects that corresponds to supplied paths
-     * excluding those non-existent paths.
-     *
-     * @param paths list of paths we want information from
-     *
-     * @return a list of FileStatus objects
-     *
-     * @throws AccessControlException If access is denied
-     * @throws IOException If an I/O error occurred
-     *
-     * Exceptions applicable to file systems accessed over RPC:
-     * @throws RpcClientException If an exception occurred in the RPC client
-     * @throws RpcServerException If an exception occurred in the RPC server
-     * @throws UnexpectedServerException If server implementation throws
-     *           undeclared exception to RPC server
-     */
-    private FileStatus[] getFileStatus(Path[] paths)
-        throws AccessControlException, IOException {
-      if (paths == null) {
-        return null;
-      }
-      ArrayList<FileStatus> results = new ArrayList<FileStatus>(paths.length);
-      for (int i = 0; i < paths.length; i++) {
-        try {
-          results.add(FileContext.this.getFileStatus(paths[i]));
-        } catch (FileNotFoundException fnfe) {
-          // ignoring
-        }
-      }
-      return results.toArray(new FileStatus[results.size()]);
-    }
-
     /**
      * Return the {@link ContentSummary} of path f.
      * @param f path
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
index c91088eab0..2239040ca3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
@@ -53,8 +53,7 @@ protected AbstractFileSystem getMyFs() {
     return myFs;
   }
 
-  protected FilterFs(AbstractFileSystem fs) throws IOException,
-      URISyntaxException {
+  protected FilterFs(AbstractFileSystem fs) throws URISyntaxException {
     super(fs.getUri(), fs.getUri().getScheme(),
         fs.getUri().getAuthority() != null, fs.getUriDefaultPort());
     myFs = fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
index 8464e51270..e070943bb2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
@@ -150,21 +150,25 @@ protected CreateParent(boolean createPar) {
 
     /**
      * Get an option of desired type
-     * @param theClass is the desired class of the opt
+     * @param clazz is the desired class of the opt
      * @param opts - not null - at least one opt must be passed
      * @return an opt from one of the opts of type theClass.
      *   returns null if there isn't any
      */
-    protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass, CreateOpts ...opts) {
+    static <T extends CreateOpts> T getOpt(Class<T> clazz, CreateOpts... opts) {
       if (opts == null) {
         throw new IllegalArgumentException("Null opt");
       }
-      CreateOpts result = null;
+      T result = null;
       for (int i = 0; i < opts.length; ++i) {
-        if (opts[i].getClass() == theClass) {
-          if (result != null)
-            throw new IllegalArgumentException("multiple blocksize varargs");
-          result = opts[i];
+        if (opts[i].getClass() == clazz) {
+          if (result != null) {
+            throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+          }
+
+          @SuppressWarnings("unchecked")
+          T t = (T)opts[i];
+          result = t;
         }
       }
       return result;
@@ -175,14 +179,16 @@ protected static CreateOpts getOpt(Class<? extends CreateOpts> theClass, Create
      * @param opts - the option is set into this array of opts
      * @return updated CreateOpts[] == opts + newValue
      */
-    protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
-        CreateOpts ...opts) {
+    static <T extends CreateOpts> CreateOpts[] setOpt(final T newValue,
+        final CreateOpts... opts) {
+      final Class<?> clazz = newValue.getClass();
       boolean alreadyInOpts = false;
       if (opts != null) {
         for (int i = 0; i < opts.length; ++i) {
-          if (opts[i].getClass() == newValue.getClass()) {
-            if (alreadyInOpts)
-              throw new IllegalArgumentException("multiple opts varargs");
+          if (opts[i].getClass() == clazz) {
+            if (alreadyInOpts) {
+              throw new IllegalArgumentException("multiple opts varargs: " + clazz);
+            }
             alreadyInOpts = true;
             opts[i] = newValue;
           }
@@ -190,9 +196,12 @@ protected static <T extends CreateOpts> CreateOpts[] setOpt(T newValue,
       }
       CreateOpts[] resultOpt = opts;
       if (!alreadyInOpts) { // no newValue in opt
-        CreateOpts[] newOpts = new CreateOpts[opts.length + 1];
-        System.arraycopy(opts, 0, newOpts, 0, opts.length);
-        newOpts[opts.length] = newValue;
+        final int oldLength = opts == null? 0: opts.length;
+        CreateOpts[] newOpts = new CreateOpts[oldLength + 1];
+        if (oldLength > 0) {
+          System.arraycopy(opts, 0, newOpts, 0, oldLength);
+        }
+        newOpts[oldLength] = newValue;
         resultOpt = newOpts;
       }
       return resultOpt;
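
Illustrative usage sketch (not part of the patch; the helper class below is
hypothetical): with getOpt(..) declared as <T extends CreateOpts> T
getOpt(Class<T>, CreateOpts...), call sites no longer need the downcast shown
in the removed lines. getOpt and setOpt are package-private, so a caller like
this is assumed to live in org.apache.hadoop.fs.

    package org.apache.hadoop.fs;

    import org.apache.hadoop.fs.Options.CreateOpts;

    class GetOptUsageSketch {
      // Resolve a block size from varargs opts, falling back to a default.
      static long resolveBlockSize(long defaultBlockSize, CreateOpts... opts) {
        // Before: (CreateOpts.BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, opts)
        CreateOpts.BlockSize blockSizeOpt =
            CreateOpts.getOpt(CreateOpts.BlockSize.class, opts);
        return blockSizeOpt != null ? blockSizeOpt.getValue() : defaultBlockSize;
      }
    }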
@@ -273,50 +282,29 @@ public static ChecksumOpt createDisabled() {
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt,
         ChecksumOpt userOpt, int userBytesPerChecksum) {
-      // The following is done to avoid unnecessary creation of new objects.
-      // tri-state variable: 0 default, 1 userBytesPerChecksum, 2 userOpt
-      short whichSize;
-      // true default, false userOpt
-      boolean useDefaultType;
-
+      final boolean useDefaultType;
+      final DataChecksum.Type type;
+      if (userOpt != null
+          && userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
+        useDefaultType = false;
+        type = userOpt.getChecksumType();
+      } else {
+        useDefaultType = true;
+        type = defaultOpt.getChecksumType();
+      }
+
       // bytesPerChecksum - order of preference
       //   user specified value in bytesPerChecksum
       //   user specified value in checksumOpt
       //   default.
       if (userBytesPerChecksum > 0) {
-        whichSize = 1; // userBytesPerChecksum
-      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
-        whichSize = 2; // userOpt
-      } else {
-        whichSize = 0; // default
-      }
-
-      // checksum type - order of preference
-      //   user specified value in checksumOpt
-      //   default.
-      if (userOpt != null &&
-          userOpt.getChecksumType() != DataChecksum.Type.DEFAULT) {
-        useDefaultType = false;
-      } else {
-        useDefaultType = true;
-      }
-
-      // Short out the common and easy cases
-      if (whichSize == 0 && useDefaultType) {
-        return defaultOpt;
-      } else if (whichSize == 2 && !useDefaultType) {
-        return userOpt;
-      }
-
-      // Take care of the rest of combinations
-      DataChecksum.Type type = useDefaultType ? defaultOpt.getChecksumType() :
-          userOpt.getChecksumType();
-      if (whichSize == 0) {
-        return new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
-      } else if (whichSize == 1) {
         return new ChecksumOpt(type, userBytesPerChecksum);
+      } else if (userOpt != null && userOpt.getBytesPerChecksum() > 0) {
+        return !useDefaultType? userOpt
+            : new ChecksumOpt(type, userOpt.getBytesPerChecksum());
       } else {
-        return new ChecksumOpt(type, userOpt.getBytesPerChecksum());
+        return useDefaultType? defaultOpt
+            : new ChecksumOpt(type, defaultOpt.getBytesPerChecksum());
       }
     }
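
Illustrative precedence check (not part of the patch; the values are
arbitrary): the refactored processChecksumOpt(..) keeps the documented order
of preference. An explicit userBytesPerChecksum outranks userOpt, which
outranks defaultOpt, and the checksum type falls back to defaultOpt whenever
the user asked for DataChecksum.Type.DEFAULT.

    import org.apache.hadoop.fs.Options.ChecksumOpt;
    import org.apache.hadoop.util.DataChecksum;

    public class ChecksumOptPrecedenceSketch {
      public static void main(String[] args) {
        ChecksumOpt defaults = new ChecksumOpt(DataChecksum.Type.CRC32C, 512);
        ChecksumOpt user = new ChecksumOpt(DataChecksum.Type.DEFAULT, 1024);

        // Type comes from defaults (user asked for DEFAULT); size comes from
        // the explicit argument, which outranks user.getBytesPerChecksum().
        ChecksumOpt resolved = ChecksumOpt.processChecksumOpt(defaults, user, 2048);
        System.out.println(resolved.getChecksumType() + " / "
            + resolved.getBytesPerChecksum()); // CRC32C / 2048
      }
    }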
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
index f5d9d5a801..dec8373811 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFactory.java
@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -35,7 +34,7 @@
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
-public class CommandFactory extends Configured implements Configurable {
+public class CommandFactory extends Configured {
   private Map<String, Class<? extends Command>> classMap =
     new HashMap<String, Class<? extends Command>>();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
index 21f9d2ca9e..0e2283c993 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
@@ -18,16 +18,20 @@
 
 package org.apache.hadoop.fs.shell;
 
-import java.io.*;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.io.IOUtils;
 
 /** Various commands for copy files */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
index 143bf12db8..79bb824c43 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
@@ -18,12 +18,12 @@
 package org.apache.hadoop.fs.shell;
 
 import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
 import org.apache.avro.generic.GenericDatumReader;
@@ -31,7 +31,6 @@
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -253,7 +252,7 @@ protected static class AvroFileInputStream extends InputStream {
     private int pos;
     private byte[] buffer;
     private ByteArrayOutputStream output;
-    private FileReader fileReader;
+    private FileReader<?> fileReader;
     private DatumWriter<Object> writer;
     private JsonEncoder encoder;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index 84bb234767..f34870eb62 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -570,7 +570,7 @@ private static URI stringToUri(String pathString) throws IOException {
 
   @Override
   public int compareTo(PathData o) {
-    return path.compareTo(((PathData)o).path);
+    return path.compareTo(o.path);
   }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 995d822434..9cc2ef77b9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -1091,12 +1091,7 @@ private static boolean isSessionExpired(Code code) {
   }
 
   private static boolean shouldRetry(Code code) {
-    switch (code) {
-      case CONNECTIONLOSS:
-      case OPERATIONTIMEOUT:
-        return true;
-    }
-    return false;
+    return code == Code.CONNECTIONLOSS || code == Code.OPERATIONTIMEOUT;
   }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
index 15008addc8..0d1e6e9734 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
@@ -19,8 +19,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 
 /**
  * Singleton to get access to Http related configuration.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 67b89e1e5a..d2664dcf2b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -434,7 +434,7 @@ private static void addNoCacheFilter(WebAppContext ctxt) {
    * provided. This wrapper and all subclasses must create at least one
    * listener.
    */
-  public Connector createBaseListener(Configuration conf) throws IOException {
+  public Connector createBaseListener(Configuration conf) {
     return HttpServer2.createDefaultChannelConnector();
   }
 
@@ -527,8 +527,7 @@ protected void addDefaultServlets() {
     addServlet("conf", "/conf", ConfServlet.class);
   }
 
-  public void addContext(Context ctxt, boolean isFiltered)
-      throws IOException {
+  public void addContext(Context ctxt, boolean isFiltered) {
     webServer.addHandler(ctxt);
     addNoCacheFilter(webAppContext);
     defaultContexts.put(ctxt, isFiltered);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index 54903394fe..cc1e517eaa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -48,11 +48,11 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
 
   /* Class to id mappings */
   @VisibleForTesting
-  Map<Class, Byte> classToIdMap = new ConcurrentHashMap<Class, Byte>();
+  Map<Class<?>, Byte> classToIdMap = new ConcurrentHashMap<Class<?>, Byte>();
 
   /* Id to Class mappings */
   @VisibleForTesting
-  Map<Byte, Class> idToClassMap = new ConcurrentHashMap<Byte, Class>();
+  Map<Byte, Class<?>> idToClassMap = new ConcurrentHashMap<Byte, Class<?>>();
 
   /* The number of new classes (those not established by the constructor) */
   private volatile byte newClasses = 0;
@@ -65,7 +65,7 @@ byte getNewClasses() {
   /**
    * Used to add "predefined" classes and by Writable to copy "new" classes.
    */
-  private synchronized void addToMap(Class clazz, byte id) {
+  private synchronized void addToMap(Class<?> clazz, byte id) {
     if (classToIdMap.containsKey(clazz)) {
       byte b = classToIdMap.get(clazz);
       if (b != id) {
@@ -74,7 +74,7 @@ private synchronized void addToMap(Class clazz, byte id) {
       }
     }
     if (idToClassMap.containsKey(id)) {
-      Class c = idToClassMap.get(id);
+      Class<?> c = idToClassMap.get(id);
       if (!c.equals(clazz)) {
         throw new IllegalArgumentException("Id " + id + " exists but maps to " +
             c.getName() + " and not " + clazz.getName());
@@ -85,7 +85,7 @@ private synchronized void addToMap(Class clazz, byte id) {
   }
 
   /** Add a Class to the maps if it is not already present. */
-  protected synchronized void addToMap(Class clazz) {
+  protected synchronized void addToMap(Class<?> clazz) {
     if (classToIdMap.containsKey(clazz)) {
       return;
     }
@@ -98,12 +98,12 @@ protected synchronized void addToMap(Class clazz) {
   }
 
   /** @return the Class class for the specified id */
-  protected Class getClass(byte id) {
+  protected Class<?> getClass(byte id) {
     return idToClassMap.get(id);
   }
 
   /** @return the id for the specified Class */
-  protected byte getId(Class clazz) {
+  protected byte getId(Class<?> clazz) {
    return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
index 72c7098d7a..fec168b831 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
@@ -82,7 +82,7 @@ public boolean equals(Object obj) {
     }
 
     if (obj instanceof MapWritable) {
-      Map map = (Map) obj;
+      MapWritable map = (MapWritable) obj;
       if (size() != map.size()) {
         return false;
       }
@@ -114,7 +114,6 @@ public Set<Writable> keySet() {
   }
 
   @Override
-  @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
@@ -163,7 +162,6 @@ public void write(DataOutput out) throws IOException {
     }
   }
 
-  @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);
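
Small usage sketch for context (not part of the patch; the class name is made
up): MapWritable registers the concrete classes of its keys and values via
addToMap(Class<?>), which is why the maps in AbstractMapWritable above gained
Class<?> type arguments, and equals(..) now compares through a MapWritable
cast rather than a raw Map.

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;

    public class MapWritableSketch {
      public static void main(String[] args) {
        MapWritable a = new MapWritable();
        a.put(new Text("k"), new IntWritable(1)); // registers Text and IntWritable
        MapWritable b = new MapWritable(a);       // copy constructor
        System.out.println(a.equals(b));          // true
      }
    }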
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
index 0074b01f90..698d38d9c3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
@@ -77,8 +77,7 @@ public Path getTestRootPath(FileContext fc, String pathString) {
 
   // the getAbsolutexxx method is needed because the root test dir
   // can be messed up by changing the working dir.
-  public String getAbsoluteTestRootDir(FileContext fc)
-      throws IOException {
+  public String getAbsoluteTestRootDir(FileContext fc) {
     if (absTestRootDir == null) {
       if (new Path(testRootDir).isAbsolute()) {
         absTestRootDir = testRootDir;
@@ -90,12 +89,11 @@ public String getAbsoluteTestRootDir(FileContext fc)
     return absTestRootDir;
   }
 
-  public Path getAbsoluteTestRootPath(FileContext fc) throws IOException {
+  public Path getAbsoluteTestRootPath(FileContext fc) {
     return fc.makeQualified(new Path(getAbsoluteTestRootDir(fc)));
   }
 
-  public Path getDefaultWorkingDirectory(FileContext fc)
-      throws IOException {
+  public Path getDefaultWorkingDirectory(FileContext fc) {
     return getTestRootPath(fc, "/user/" + System.getProperty("user.name"))
         .makeQualified(fc.getDefaultFileSystem().getUri(),
             fc.getWorkingDirectory());
@@ -106,8 +104,7 @@ public Path getDefaultWorkingDirectory(FileContext fc)
    */
   public static long createFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -146,8 +143,7 @@ public static long createFileNonRecursive(FileContext fc, Path path)
 
   public static void appendToFile(FileContext fc, Path path, int numBlocks,
       CreateOpts... options) throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -203,14 +199,11 @@ public static byte[] readFile(FileContext fc, Path path, int len)
   }
 
   public FileStatus containsPath(FileContext fc, Path path,
-      FileStatus[] dirList)
-      throws IOException {
+      FileStatus[] dirList) {
     return containsPath(getTestRootPath(fc, path.toString()), dirList);
   }
 
-  public static FileStatus containsPath(Path path,
-      FileStatus[] dirList)
-      throws IOException {
+  public static FileStatus containsPath(Path path, FileStatus[] dirList) {
     for(int i = 0; i < dirList.length; i ++) {
       if (path.equals(dirList[i].getPath()))
         return dirList[i];
@@ -219,8 +212,7 @@ public static FileStatus containsPath(Path path,
   }
 
   public FileStatus containsPath(FileContext fc, String path,
-      FileStatus[] dirList)
-      throws IOException {
+      FileStatus[] dirList) {
     return containsPath(fc, new Path(path), dirList);
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
index e10b22edb7..0dd1e9aa3e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java
@@ -62,8 +62,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -100,8 +99,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
index eb5df084b9..9a5f40edf6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java
@@ -63,8 +63,7 @@ public Path getDefaultWorkingDirectory() throws IOException {
    */
   public long createFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out =
@@ -101,8 +100,7 @@ public long createFileNonRecursive(Path path) throws IOException {
 
   public void appendToFile(Path path, int numBlocks, CreateOpts... options)
       throws IOException {
-    BlockSize blockSizeOpt =
-        (BlockSize) CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
+    BlockSize blockSizeOpt = CreateOpts.getOpt(CreateOpts.BlockSize.class, options);
     long blockSize = blockSizeOpt != null ? blockSizeOpt.getValue()
         : DEFAULT_BLOCK_SIZE;
     FSDataOutputStream out;
@@ -261,7 +259,7 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
 
     // Need to translate the FileContext-style options into FileSystem-style
 
     // Permissions with umask
-    CreateOpts.Perms permOpt = (CreateOpts.Perms) CreateOpts.getOpt(
+    CreateOpts.Perms permOpt = CreateOpts.getOpt(
         CreateOpts.Perms.class, opts);
     FsPermission umask = FsPermission.getUMask(fs.getConf());
     FsPermission permission = (permOpt != null) ? permOpt.getValue()
@@ -273,23 +271,22 @@ public FSDataOutputStream create(Path f, EnumSet<CreateFlag> createFlag,
     int bufferSize = fs.getConf().getInt(
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
         CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
-    CreateOpts.BufferSize bufOpt = (CreateOpts.BufferSize) CreateOpts.getOpt(
+    CreateOpts.BufferSize bufOpt = CreateOpts.getOpt(
         CreateOpts.BufferSize.class, opts);
     bufferSize = (bufOpt != null) ? bufOpt.getValue() : bufferSize;
     // replication
     short replication = fs.getDefaultReplication(f);
     CreateOpts.ReplicationFactor repOpt =
-        (CreateOpts.ReplicationFactor) CreateOpts.getOpt(
-            CreateOpts.ReplicationFactor.class, opts);
+        CreateOpts.getOpt(CreateOpts.ReplicationFactor.class, opts);
     replication = (repOpt != null) ? repOpt.getValue() : replication;
     // blockSize
     long blockSize = fs.getDefaultBlockSize(f);
-    CreateOpts.BlockSize blockOpt = (CreateOpts.BlockSize) CreateOpts.getOpt(
+    CreateOpts.BlockSize blockOpt = CreateOpts.getOpt(
         CreateOpts.BlockSize.class, opts);
     blockSize = (blockOpt != null) ? blockOpt.getValue() : blockSize;
     // Progressable
     Progressable progress = null;
-    CreateOpts.Progress progressOpt = (CreateOpts.Progress) CreateOpts.getOpt(
+    CreateOpts.Progress progressOpt = CreateOpts.getOpt(
         CreateOpts.Progress.class, opts);
     progress = (progressOpt != null) ? progressOpt.getValue() : progress;
     return fs.create(f, permission, overwrite, bufferSize, replication,