diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
index 2783bf3b30..a6ce035fa9 100644
--- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
+++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/RootDocProcessor.java
@@ -97,6 +97,7 @@ public ExcludeHandler(Object target) {
       this.target = target;
     }
 
+    @Override
     public Object invoke(Object proxy, Method method, Object[] args)
         throws Throwable {
       String methodName = method.getName();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index cf7aafafb7..f1cb41dd6d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1847,6 +1847,7 @@ public void clear() {
    *
    * @return an iterator over the entries.
    */
+  @Override
   public Iterator<Map.Entry<String, String>> iterator() {
     // Get a copy of just the string to string pairs. After the old object
     // methods that allow non-strings to be put into configurations are removed,
@@ -2272,6 +2273,7 @@ public void readFields(DataInput in) throws IOException {
   }
 
   //@Override
+  @Override
   public void write(DataOutput out) throws IOException {
     Properties props = getProps();
     WritableUtils.writeVInt(out, props.size());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
index 2bc7e537e4..f06af2b98d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
@@ -39,11 +39,13 @@ public Configured(Configuration conf) {
   }
 
   // inherit javadoc
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
 
   // inherit javadoc
+  @Override
   public Configuration getConf() {
     return conf;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index 041b263edd..452d29f7b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -23,12 +23,10 @@
 import org.apache.commons.lang.StringEscapeUtils;
 
 import java.util.Collection;
-import java.util.Map;
 import java.util.Enumeration;
 import java.io.IOException;
 import java.io.PrintWriter;
 
-import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -57,9 +55,6 @@ public class ReconfigurationServlet extends HttpServlet {
   public static final String CONF_SERVLET_RECONFIGURABLE_PREFIX =
     "conf.servlet.reconfigurable.";
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public void init() throws ServletException {
     super.init();
@@ -202,9 +197,6 @@ private void applyChanges(PrintWriter out, Reconfigurable reconf,
     }
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {
@@ -219,9 +211,6 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     printFooter(out);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   protected void doPost(HttpServletRequest req, HttpServletResponse resp)
     throws ServletException, IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9eda44580..6adbeab60a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -47,7 +47,6 @@
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 
 /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
index a319fb7b36..b4a4a85674 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
@@ -45,22 +45,27 @@ public AvroFSInput(final FileContext fc, final Path p) throws IOException {
     this.stream = fc.open(p);
   }
 
+  @Override
   public long length() {
     return len;
   }
 
+  @Override
   public int read(byte[] b, int off, int len) throws IOException {
     return stream.read(b, off, len);
   }
 
+  @Override
   public void seek(long p) throws IOException {
     stream.seek(p);
   }
 
+  @Override
   public long tell() throws IOException {
     return stream.getPos();
   }
 
+  @Override
   public void close() throws IOException {
     stream.close();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index cfe9ee8c66..fa095343c5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -204,6 +204,7 @@ public void setTopologyPaths(String[] topologyPaths) throws IOException {
     }
   }
 
+  @Override
   public String toString() {
     StringBuilder result = new StringBuilder();
     result.append(offset);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java
index f322924012..745148281d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BufferedFSInputStream.java
@@ -19,7 +19,6 @@
 
 import java.io.BufferedInputStream;
 import java.io.FileDescriptor;
-import java.io.FileInputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -50,10 +49,12 @@ public BufferedFSInputStream(FSInputStream in, int size) {
     super(in, size);
   }
 
+  @Override
   public long getPos() throws IOException {
     return ((FSInputStream)in).getPos()-(count-pos);
   }
 
+  @Override
   public long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -63,6 +64,7 @@ public long skip(long n) throws IOException {
     return n;
   }
 
+  @Override
   public void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -82,20 +84,24 @@ public void seek(long pos) throws IOException {
     ((FSInputStream)in).seek(pos);
   }
 
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     pos = 0;
     count = 0;
     return ((FSInputStream)in).seekToNewSource(targetPos);
   }
 
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     return ((FSInputStream)in).read(position, buffer, offset, length) ;
   }
 
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     ((FSInputStream)in).readFully(position, buffer, offset, length);
   }
 
+  @Override
   public void readFully(long position, byte[] buffer) throws IOException {
     ((FSInputStream)in).readFully(position, buffer);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 17707718b8..42ee870268 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -53,6 +53,7 @@ public ChecksumFileSystem(FileSystem fs) {
     super(fs);
   }
 
+  @Override
   public void setConf(Configuration conf) {
     super.setConf(conf);
     if (conf != null) {
@@ -64,6 +65,7 @@ public void setConf(Configuration conf) {
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     this.verifyChecksum = verifyChecksum;
   }
@@ -74,6 +76,7 @@ public void setWriteChecksum(boolean writeChecksum) {
   }
 
   /** get the raw file system */
+  @Override
   public FileSystem getRawFileSystem() {
     return fs;
   }
@@ -162,14 +165,17 @@ private long getChecksumFilePos( long dataPos ) {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }
 
+    @Override
     protected long getChunkPosition( long dataPos ) {
       return dataPos/bytesPerSum*bytesPerSum;
     }
 
+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }
 
+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException {
       // parameter check
@@ -190,6 +196,7 @@ public int read(long position, byte[] b, int off, int len)
       return nread;
     }
 
+    @Override
     public void close() throws IOException {
       datas.close();
       if( sums != null ) {
@@ -290,6 +297,7 @@ private long getFileLength() throws IOException {
      * @exception IOException if an I/O error occurs.
     *             ChecksumException if the chunk to skip to is corrupted
     */
+    @Override
     public synchronized long skip(long n) throws IOException {
       long curPos = getPos();
       long fileLength = getFileLength();
@@ -311,6 +319,7 @@ public synchronized long skip(long n) throws IOException {
      *             ChecksumException if the chunk to seek to is corrupted
      */
 
+    @Override
     public synchronized void seek(long pos) throws IOException {
       if(pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -339,7 +348,7 @@ public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     return new FSDataBoundedInputStream(fs, f, in);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     throw new IOException("Not supported");
@@ -398,6 +407,7 @@ public ChecksumFSOutputSummer(ChecksumFileSystem fs,
       sums.writeInt(bytesPerSum);
     }
 
+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -412,7 +422,6 @@ protected void writeChunk(byte[] b, int offset, int len, byte[] checksum)
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -454,7 +463,6 @@ private FSDataOutputStream create(Path f, FsPermission permission,
     return out;
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -472,6 +480,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     boolean value = fs.setReplication(src, replication);
     if (!value)
@@ -487,6 +496,7 @@ public boolean setReplication(Path src, short replication) throws IOException {
   /**
    * Rename files/dirs
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     if (fs.isDirectory(src)) {
       return fs.rename(src, dst);
@@ -516,6 +526,7 @@ public boolean rename(Path src, Path dst) throws IOException {
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException{
     FileStatus fstatus = null;
     try {
@@ -538,6 +549,7 @@ public boolean delete(Path f, boolean recursive) throws IOException{
   }
 
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(Path file) {
       return !isChecksumFile(file);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 4784991982..12805d86a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -32,7 +32,6 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.PureJavaCrc32;
-import org.apache.hadoop.util.StringUtils;
 
 /**
  * Abstract Checksumed Fs.
@@ -61,6 +60,7 @@ public ChecksumFs(AbstractFileSystem theFs)
   /**
    * Set whether to verify checksum.
    */
+  @Override
   public void setVerifyChecksum(boolean inVerifyChecksum) {
     this.verifyChecksum = inVerifyChecksum;
   }
@@ -152,14 +152,17 @@ private long getChecksumFilePos(long dataPos) {
       return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
     }
 
+    @Override
     protected long getChunkPosition(long dataPos) {
       return dataPos/bytesPerSum*bytesPerSum;
     }
 
+    @Override
     public int available() throws IOException {
       return datas.available() + super.available();
     }
 
+    @Override
     public int read(long position, byte[] b, int off, int len)
       throws IOException, UnresolvedLinkException {
       // parameter check
@@ -180,6 +183,7 @@ public int read(long position, byte[] b, int off, int len)
       return nread;
     }
 
+    @Override
     public void close() throws IOException {
       datas.close();
       if (sums != null) {
@@ -258,6 +262,7 @@ private long getFileLength() throws IOException, UnresolvedLinkException {
      * @exception IOException if an I/O error occurs.
      *             ChecksumException if the chunk to skip to is corrupted
      */
+    @Override
     public synchronized long skip(long n) throws IOException {
       final long curPos = getPos();
       final long fileLength = getFileLength();
@@ -279,6 +284,7 @@ public synchronized long skip(long n) throws IOException {
      *             ChecksumException if the chunk to seek to is corrupted
      */
 
+    @Override
     public synchronized void seek(long pos) throws IOException {
       if (pos>getFileLength()) {
         throw new IOException("Cannot seek after EOF");
@@ -348,6 +354,7 @@ public ChecksumFSOutputSummer(final ChecksumFs fs, final Path file,
       sums.writeInt(bytesPerSum);
     }
 
+    @Override
     public void close() throws IOException {
       flushBuffer();
       sums.close();
@@ -447,6 +454,7 @@ public void renameInternal(Path src, Path dst)
    * Implement the delete(Path, boolean) in checksum
    * file system.
    */
+  @Override
   public boolean delete(Path f, boolean recursive)
     throws IOException, UnresolvedLinkException {
     FileStatus fstatus = null;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
index c0ab82de1d..0d685b43e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
@@ -75,7 +75,7 @@ public ContentSummary(
   /** Returns (disk) space quota */
   public long getSpaceQuota() {return spaceQuota;}
 
-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(length);
@@ -86,7 +86,7 @@ public void write(DataOutput out) throws IOException {
     out.writeLong(spaceQuota);
   }
 
-  /** {@inheritDoc} */
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     this.length = in.readLong();
@@ -131,7 +131,7 @@ public static String getHeader(boolean qOption) {
     return qOption ? QUOTA_HEADER : HEADER;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return toString(true);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index 9949834222..c552f331f8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -131,6 +131,7 @@ public String getMount() throws IOException {
     return mount;
   }
 
+  @Override
   public String toString() {
     return
       "df -k " + mount +"\n" +
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
index 5caec7204d..2c96b0abaf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
@@ -76,6 +76,7 @@ public DU(File path, Configuration conf) throws IOException {
    **/
   class DURefreshThread implements Runnable {
 
+    @Override
     public void run() {
 
       while(shouldRun) {
@@ -169,16 +170,19 @@ public void shutdown() {
     }
   }
 
+  @Override
   public String toString() {
     return
       "du -sk " + dirPath +"\n" +
       used + "\t" + dirPath;
   }
 
+  @Override
   protected String[] getExecString() {
     return new String[] {"du", "-sk", dirPath};
   }
 
+  @Override
   protected void parseExecResult(BufferedReader lines) throws IOException {
     String line = lines.readLine();
     if (line == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
index e47dffb082..eef53140c3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java
@@ -44,6 +44,7 @@ public FSDataInputStream(InputStream in)
    *
    * @param desired offset to seek to
    */
+  @Override
   public synchronized void seek(long desired) throws IOException {
     ((Seekable)in).seek(desired);
   }
@@ -53,6 +54,7 @@ public synchronized void seek(long desired) throws IOException {
    *
    * @return current position in the input stream
    */
+  @Override
   public long getPos() throws IOException {
     return ((Seekable)in).getPos();
   }
@@ -68,6 +70,7 @@ public long getPos() throws IOException {
    *         if there is no more data because the end of the stream has been
    *         reached
    */
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     return ((PositionedReadable)in).read(position, buffer, offset, length);
@@ -85,6 +88,7 @@ public int read(long position, byte[] buffer, int offset, int length)
    *             If an exception is thrown an undetermined number
    *             of bytes in the buffer may have been written.
    */
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, offset, length);
@@ -93,6 +97,7 @@ public void readFully(long position, byte[] buffer, int offset, int length)
   /**
    * See {@link #readFully(long, byte[], int, int)}.
   */
+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     ((PositionedReadable)in).readFully(position, buffer, 0, buffer.length);
@@ -104,6 +109,7 @@ public void readFully(long position, byte[] buffer)
    * @param targetPos position to seek to
    * @return true if a new source is found, false otherwise
    */
+  @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
     return ((Seekable)in).seekToNewSource(targetPos);
   }
@@ -118,6 +124,7 @@ public InputStream getWrappedStream() {
     return in;
   }
 
+  @Override
   public int read(ByteBuffer buf) throws IOException {
     if (in instanceof ByteBufferReadable) {
       return ((ByteBufferReadable)in).read(buf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index 9974f27e24..cc992e7c94 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -140,6 +140,7 @@ protected synchronized boolean needChecksum() {
    * @exception  IOException  if an I/O error occurs.
    */
 
+  @Override
   public synchronized int read() throws IOException {
     if (pos >= count) {
       fill();
@@ -180,6 +181,7 @@ public synchronized int read() throws IOException {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if any checksum error occurs
    */
+  @Override
   public synchronized int read(byte[] b, int off, int len) throws IOException {
     // parameter check
     if ((off | len | (off + len) | (b.length - (off + len))) < 0) {
@@ -367,6 +369,7 @@ public synchronized int available() throws IOException {
    * @exception  IOException  if an I/O error occurs.
    *             ChecksumException if the chunk to skip to is corrupted
    */
+  @Override
   public synchronized long skip(long n) throws IOException {
     if (n <= 0) {
       return 0;
@@ -389,6 +392,7 @@ public synchronized long skip(long n) throws IOException {
    *             ChecksumException if the chunk to seek to is corrupted
    */
 
+  @Override
   public synchronized void seek(long pos) throws IOException {
     if( pos<0 ) {
       return;
@@ -462,13 +466,16 @@ final protected synchronized void set(boolean verifyChecksum,
     this.pos = 0;
   }
 
+  @Override
   final public boolean markSupported() {
     return false;
   }
 
+  @Override
   final public void mark(int readlimit) {
   }
 
+  @Override
   final public void reset() throws IOException {
     throw new IOException("mark/reset not supported");
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java
index f7bc22159d..8d668feeab 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputStream.java
@@ -36,19 +36,23 @@ public abstract class FSInputStream extends InputStream
    * The next read() will be from that location.  Can't
    * seek past the end of the file.
    */
+  @Override
   public abstract void seek(long pos) throws IOException;
 
   /**
    * Return the current offset from the start of the file
    */
+  @Override
   public abstract long getPos() throws IOException;
 
   /**
    * Seeks a different copy of the data.  Returns true if
    * found a new source, false otherwise.
    */
+  @Override
   public abstract boolean seekToNewSource(long targetPos) throws IOException;
 
+  @Override
   public int read(long position, byte[] buffer, int offset, int length)
     throws IOException {
     synchronized (this) {
@@ -64,6 +68,7 @@ public int read(long position, byte[] buffer, int offset, int length)
     }
   }
 
+  @Override
   public void readFully(long position, byte[] buffer, int offset, int length)
     throws IOException {
     int nread = 0;
@@ -76,6 +81,7 @@ public void readFully(long position, byte[] buffer, int offset, int length)
     }
   }
 
+  @Override
   public void readFully(long position, byte[] buffer)
     throws IOException {
     readFully(position, buffer, 0, buffer.length);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 66b6a74916..d494f30de7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -55,6 +55,7 @@ protected abstract void writeChunk(byte[] b, int offset, int len, byte[] checksum)
   throws IOException;
 
   /** Write one byte */
+  @Override
   public synchronized void write(int b) throws IOException {
     sum.update(b);
     buf[count++] = (byte)b;
@@ -81,6 +82,7 @@ public synchronized void write(int b) throws IOException {
    * @param      len   the number of bytes to write.
    * @exception  IOException  if an I/O error occurs.
    */
+  @Override
   public synchronized void write(byte b[], int off, int len)
   throws IOException {
     if (off < 0 || len < 0 || off > b.length - len) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 2b248bdcf2..149a3e3a4a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -37,6 +37,7 @@ public abstract class FileChecksum implements Writable {
   public abstract byte[] getBytes();
 
   /** Return true if both the algorithms and the values are the same. */
+  @Override
   public boolean equals(Object other) {
     if (other == this) {
       return true;
@@ -50,7 +51,7 @@ public boolean equals(Object other) {
         && Arrays.equals(this.getBytes(), that.getBytes());
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return getAlgorithmName().hashCode() ^ Arrays.hashCode(getBytes());
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 4e5057a4e9..5cfce9b019 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -190,6 +190,7 @@ public final class FileContext {
       new FileContextFinalizer();
 
   private static final PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
     public boolean accept(final Path file) {
       return true;
     }
@@ -318,6 +319,7 @@ private static AbstractFileSystem getAbstractFileSystem(
       throws UnsupportedFileSystemException, IOException {
     try {
       return user.doAs(new PrivilegedExceptionAction<AbstractFileSystem>() {
+        @Override
         public AbstractFileSystem run() throws UnsupportedFileSystemException {
           return AbstractFileSystem.get(uri, conf);
         }
@@ -660,6 +662,7 @@ public FSDataOutputStream create(final Path f,
     final CreateOpts[] updatedOpts =
         CreateOpts.setOpt(CreateOpts.perms(permission), opts);
     return new FSLinkResolver<FSDataOutputStream>() {
+      @Override
       public FSDataOutputStream next(final AbstractFileSystem fs, final Path p)
         throws IOException {
         return fs.create(p, createFlag, updatedOpts);
@@ -703,6 +706,7 @@ public void mkdir(final Path dir, final FsPermission permission,
     final FsPermission absFerms = (permission == null ?
         FsPermission.getDefault() : permission).applyUMask(umask);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.mkdir(p, absFerms, createParent);
@@ -738,6 +742,7 @@ public boolean delete(final Path f, final boolean recursive)
       UnsupportedFileSystemException, IOException {
     Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.delete(p, recursive));
@@ -766,6 +771,7 @@ public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.open(p);
@@ -796,6 +802,7 @@ public FSDataInputStream open(final Path f, final int bufferSize)
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FSDataInputStream>() {
+      @Override
       public FSDataInputStream next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.open(p, bufferSize);
@@ -826,6 +833,7 @@ public boolean setReplication(final Path f, final short replication)
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Boolean>() {
+      @Override
       public Boolean next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return Boolean.valueOf(fs.setReplication(p, replication));
@@ -894,6 +902,7 @@ public void rename(final Path src, final Path dst,
      */
     final Path source = resolveIntermediate(absSrc);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.rename(source, p, options);
@@ -925,6 +934,7 @@ public void setPermission(final Path f, final FsPermission permission)
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.setPermission(p, permission);
@@ -967,6 +977,7 @@ public void setOwner(final Path f, final String username,
     }
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.setOwner(p, username, groupname);
@@ -1002,6 +1013,7 @@ public void setTimes(final Path f, final long mtime, final long atime)
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.setTimes(p, mtime, atime);
@@ -1034,6 +1046,7 @@ public FileChecksum getFileChecksum(final Path f)
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileChecksum>() {
+      @Override
       public FileChecksum next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.getFileChecksum(p);
@@ -1089,6 +1102,7 @@ public FileStatus getFileStatus(final Path f) throws AccessControlException,
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.getFileStatus(p);
@@ -1135,6 +1149,7 @@ public FileStatus getFileLinkStatus(final Path f)
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1165,6 +1180,7 @@ public Path getLinkTarget(final Path f) throws AccessControlException,
       FileNotFoundException, UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         FileStatus fi = fs.getFileLinkStatus(p);
@@ -1208,6 +1224,7 @@ public BlockLocation[] getFileBlockLocations(final Path f, final long start,
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<BlockLocation[]>() {
+      @Override
       public BlockLocation[] next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.getFileBlockLocations(p, start, len);
@@ -1246,6 +1263,7 @@ public FsStatus getFsStatus(final Path f) throws AccessControlException,
     }
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FsStatus>() {
+      @Override
       public FsStatus next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.getFsStatus(p);
@@ -1339,6 +1357,7 @@ public void createSymlink(final Path target, final Path link,
       IOException {
     final Path nonRelLink = fixRelativePart(link);
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         fs.createSymlink(target, p, createParent);
@@ -1373,6 +1392,7 @@ public RemoteIterator<FileStatus> listStatus(final Path f) throws
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<FileStatus>>() {
+      @Override
       public RemoteIterator<FileStatus> next(
           final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
@@ -1432,6 +1452,7 @@ public RemoteIterator<LocatedFileStatus> listLocatedStatus(
       UnsupportedFileSystemException, IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<RemoteIterator<LocatedFileStatus>>() {
+      @Override
       public RemoteIterator<LocatedFileStatus> next(
           final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
@@ -1703,6 +1724,7 @@ public FileStatus[] listStatus(final Path f) throws AccessControlException,
       IOException {
     final Path absF = fixRelativePart(f);
     return new FSLinkResolver<FileStatus[]>() {
+      @Override
       public FileStatus[] next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.listStatus(p);
@@ -2232,6 +2254,7 @@ private static boolean isSameFS(Path qualPath1, Path qualPath2) {
    * Deletes all the paths in deleteOnExit on JVM shutdown.
    */
   static class FileContextFinalizer implements Runnable {
+    @Override
     public synchronized void run() {
       processDeleteOnExit();
     }
@@ -2244,6 +2267,7 @@ public synchronized void run() {
   protected Path resolve(final Path f) throws FileNotFoundException,
       UnresolvedLinkException, AccessControlException, IOException {
     return new FSLinkResolver<Path>() {
+      @Override
       public Path next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.resolvePath(p);
@@ -2259,6 +2283,7 @@ public Path next(final AbstractFileSystem fs, final Path p)
    */
   protected Path resolveIntermediate(final Path f) throws IOException {
     return new FSLinkResolver<FileStatus>() {
+      @Override
       public FileStatus next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         return fs.getFileLinkStatus(p);
@@ -2281,6 +2306,7 @@ Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
     final HashSet<AbstractFileSystem> result
         = new HashSet<AbstractFileSystem>();
     new FSLinkResolver<Void>() {
+      @Override
       public Void next(final AbstractFileSystem fs, final Path p)
         throws IOException, UnresolvedLinkException {
         result.add(fs);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
index 2757475faf..5445f6eb15 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -253,6 +253,7 @@ public void setSymlink(final Path p) {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     Text.writeString(out, getPath().toString(), Text.DEFAULT_MAX_LEN);
     out.writeLong(getLen());
@@ -270,6 +271,7 @@ public void write(DataOutput out) throws IOException {
     }
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     String strPath = Text.readString(in, Text.DEFAULT_MAX_LEN);
     this.path = new Path(strPath);
@@ -299,6 +301,7 @@ public void readFields(DataInput in) throws IOException {
    * @throws ClassCastException if the specified object's is not of
    *         type FileStatus
    */
+  @Override
   public int compareTo(Object o) {
     FileStatus other = (FileStatus)o;
     return this.getPath().compareTo(other.getPath());
@@ -308,6 +311,7 @@ public int compareTo(Object o) {
    * @param   o the object to be compared.
    * @return  true if two file status has the same path name; false if not.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       return false;
@@ -328,6 +332,7 @@ public boolean equals(Object o) {
    *
    * @return  a hash code value for the path name.
    */
+  @Override
   public int hashCode() {
     return getPath().hashCode();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 31b59439a9..ff9f2db1ff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -147,6 +147,7 @@ public static FileSystem get(final URI uri, final Configuration conf,
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return get(uri, conf);
       }
@@ -332,6 +333,7 @@ public static FileSystem newInstance(final URI uri, final Configuration conf,
     UserGroupInformation ugi =
         UserGroupInformation.getBestUGI(ticketCachePath, user);
     return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override
       public FileSystem run() throws IOException {
         return newInstance(uri,conf);
       }
@@ -1389,6 +1391,7 @@ public ContentSummary getContentSummary(Path f) throws IOException {
   }
 
   final private static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
       public boolean accept(Path file) {
         return true;
       }
@@ -2056,6 +2059,7 @@ public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
    * No more filesystem operations are needed.  Will
    * release any held locks.
    */
+  @Override
   public void close() throws IOException {
     // delete all files that were marked as delete-on-exit.
     processDeleteOnExit();
@@ -2393,6 +2397,7 @@ synchronized void closeAll(boolean onlyAutomatic) throws IOException {
   }
 
   private class ClientFinalizer implements Runnable {
+    @Override
     public synchronized void run() {
       try {
         closeAll(true);
@@ -2447,7 +2452,7 @@ static class Key {
       this.ugi = UserGroupInformation.getCurrentUser();
     }
 
-    /** {@inheritDoc} */
+    @Override
     public int hashCode() {
       return (scheme + authority).hashCode() + ugi.hashCode() + (int)unique;
     }
@@ -2456,7 +2461,7 @@ static boolean isEqual(Object a, Object b) {
       return a == b || (a != null && a.equals(b));
     }
 
-    /** {@inheritDoc} */
+    @Override
    public boolean equals(Object obj) {
       if (obj == this) {
         return true;
@@ -2471,7 +2476,7 @@ && isEqual(this.ugi, that.ugi)
       return false;
     }
 
-    /** {@inheritDoc} */
+    @Override
     public String toString() {
       return "("+ugi.toString() + ")@" + scheme + "://" + authority;
     }
@@ -2584,6 +2589,7 @@ public int getWriteOps() {
       return writeOps.get();
     }
 
+    @Override
     public String toString() {
       return bytesRead + " bytes read, " + bytesWritten + " bytes written, "
           + readOps + " read ops, " + largeReadOps + " large read ops, "
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index ba9bb4eafe..b6a2acae49 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -414,9 +414,11 @@ private static class CygPathCommand extends Shell {
     String getResult() throws IOException {
       return result;
     }
+    @Override
     protected String[] getExecString() {
       return command;
     }
+    @Override
     protected void parseExecResult(BufferedReader lines) throws IOException {
       String line = lines.readLine();
       if (line == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index c2ecd20b5a..6e1e099cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -76,6 +76,7 @@ public FileSystem getRawFileSystem() {
    *   for this FileSystem
    * @param conf the configuration
    */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     super.initialize(name, conf);
     // this is less than ideal, but existing filesystems sometimes neglect
@@ -90,6 +91,7 @@ public void initialize(URI name, Configuration conf) throws IOException {
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
+  @Override
   public URI getUri() {
     return fs.getUri();
   }
@@ -104,6 +106,7 @@ protected URI getCanonicalUri() {
   }
 
   /** Make sure that a path specifies a FileSystem. */
+  @Override
   public Path makeQualified(Path path) {
     Path fqPath = fs.makeQualified(path);
     // swap in our scheme if the filtered fs is using a different scheme
@@ -125,10 +128,12 @@ public Path makeQualified(Path path) {
   ///////////////////////////////////////////////////////////////
 
   /** Check that a Path belongs to this FileSystem. */
+  @Override
   protected void checkPath(Path path) {
     fs.checkPath(path);
   }
 
+  @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
     long len) throws IOException {
       return fs.getFileBlockLocations(file, start, len);
@@ -143,17 +148,17 @@ public Path resolvePath(final Path p) throws IOException {
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    */
+  @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     return fs.open(f, bufferSize);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException {
     return fs.append(f, bufferSize, progress);
   }
 
-  /** {@inheritDoc} */
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -171,6 +176,7 @@ public FSDataOutputStream create(Path f, FsPermission permission,
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
+  @Override
   public boolean setReplication(Path src, short replication) throws IOException {
     return fs.setReplication(src, replication);
   }
@@ -179,23 +185,23 @@ public boolean setReplication(Path src, short replication) throws IOException {
    * Renames Path src to Path dst.  Can take place on local fs
    * or remote DFS.
    */
+  @Override
   public boolean rename(Path src, Path dst) throws IOException {
     return fs.rename(src, dst);
   }
 
   /** Delete a file */
+  @Override
   public boolean delete(Path f, boolean recursive) throws IOException {
     return fs.delete(f, recursive);
   }
 
   /** List files in a directory. */
+  @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     return fs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -203,11 +209,13 @@ public RemoteIterator<Path> listCorruptFileBlocks(Path path)
   }
 
   /** List files and its block locations in a directory. */
+  @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
   throws IOException {
     return fs.listLocatedStatus(f);
   }
 
+  @Override
   public Path getHomeDirectory() {
     return fs.getHomeDirectory();
   }
@@ -219,6 +227,7 @@ public Path getHomeDirectory() {
    *
    * @param newDir
    */
+  @Override
   public void setWorkingDirectory(Path newDir) {
     fs.setWorkingDirectory(newDir);
   }
@@ -228,21 +237,21 @@ public void setWorkingDirectory(Path newDir) {
    *
    * @return the directory pathname
   */
+  @Override
   public Path getWorkingDirectory() {
     return fs.getWorkingDirectory();
   }
 
+  @Override
   protected Path getInitialWorkingDirectory() {
     return fs.getInitialWorkingDirectory();
   }
 
-  /** {@inheritDoc} */
   @Override
   public FsStatus getStatus(Path p) throws IOException {
     return fs.getStatus(p);
   }
 
-  /** {@inheritDoc} */
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     return fs.mkdirs(f, permission);
@@ -254,6 +263,7 @@ public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    * the given dst name.
    * delSrc indicates if the source should be removed
   */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyFromLocalFile(delSrc, src, dst);
@@ -264,6 +274,7 @@ public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
    * the given dst name.
    * delSrc indicates if the source should be removed
   */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite,
                                 Path[] srcs, Path dst)
     throws IOException {
@@ -275,6 +286,7 @@ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
    * the given dst name.
    * delSrc indicates if the source should be removed
   */
+  @Override
   public void copyFromLocalFile(boolean delSrc, boolean overwrite,
                                 Path src, Path dst)
     throws IOException {
@@ -286,6 +298,7 @@ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
    * Copy it from FS control to the local dst name.
    * delSrc indicates if the src will be removed or not.
   */
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     fs.copyToLocalFile(delSrc, src, dst);
@@ -297,6 +310,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst)
    * file.  If the FS is local, we write directly into the target.  If
    * the FS is remote, we write into the tmp local area.
   */
+  @Override
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     return fs.startLocalOutput(fsOutputFile, tmpLocalFile);
@@ -308,12 +322,14 @@ public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
    * FS will copy the contents of tmpLocalFile to the correct target at
    * fsOutputFile.
   */
+  @Override
   public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
     fs.completeLocalOutput(fsOutputFile, tmpLocalFile);
   }
 
   /** Return the total size of all files in the filesystem.*/
+  @Override
   public long getUsed() throws IOException{
     return fs.getUsed();
   }
@@ -357,16 +373,17 @@ public FsServerDefaults getServerDefaults(Path f) throws IOException {
   /**
    * Get file status.
   */
+  @Override
   public FileStatus getFileStatus(Path f) throws IOException {
     return fs.getFileStatus(f);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public FileChecksum getFileChecksum(Path f) throws IOException {
     return fs.getFileChecksum(f);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void setVerifyChecksum(boolean verifyChecksum) {
     fs.setVerifyChecksum(verifyChecksum);
   }
@@ -387,21 +404,18 @@ public void close() throws IOException {
     fs.close();
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setOwner(Path p, String username, String groupname
       ) throws IOException {
     fs.setOwner(p, username, groupname);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setTimes(Path p, long mtime, long atime
       ) throws IOException {
     fs.setTimes(p, mtime, atime);
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setPermission(Path p, FsPermission permission
       ) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
index 6cfc11b1fa..9637b6b913 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFs.java
@@ -174,9 +174,6 @@ public FileStatus[] listStatus(Path f)
     return myFs.listStatus(f);
   }
 
-  /**
-   * {@inheritDoc}
-   */
   @Override
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
index 637697b83d..c1b9071bbc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsServerDefaults.java
@@ -39,6 +39,7 @@ public class FsServerDefaults implements Writable {
   static { // register a ctor
     WritableFactories.setFactory(FsServerDefaults.class, new WritableFactory() {
+      @Override
       public Writable newInstance() { return new FsServerDefaults(); }
     });
   }
@@ -106,6 +107,7 @@ public DataChecksum.Type getChecksumType() {
   // /////////////////////////////////////////
   // Writable
   // /////////////////////////////////////////
+  @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
     out.writeLong(blockSize);
@@ -116,6 +118,7 @@ public void write(DataOutput out) throws IOException {
     WritableUtils.writeEnum(out, checksumType);
   }
 
+  @Override
   @InterfaceAudience.Private
   public void readFields(DataInput in) throws IOException {
     blockSize = in.readLong();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 4da32789e5..0db1f9e431 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -236,6 +236,7 @@ private void printInstanceHelp(PrintStream out, Command instance) {
   /**
    * run
   */
+  @Override
   public int run(String argv[]) throws Exception {
     // initialize FsShell
     init();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index 8b9de78fe0..d392c7d765 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -60,12 +60,14 @@ public long getRemaining() {
   //////////////////////////////////////////////////
   // Writable
   //////////////////////////////////////////////////
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(capacity);
     out.writeLong(used);
     out.writeLong(remaining);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     capacity = in.readLong();
     used = in.readLong();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
index 65c608ddec..90e75b0ccb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlConnection.java
@@ -53,7 +53,6 @@ public void connect() throws IOException {
     }
   }
 
-  /* @inheritDoc */
   @Override
   public InputStream getInputStream() throws IOException {
     if (is == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
index b9a5f1a2cc..2a9208ea5b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsUrlStreamHandlerFactory.java
@@ -59,6 +59,7 @@ public FsUrlStreamHandlerFactory(Configuration conf) {
     this.handler = new FsUrlStreamHandler(this.conf);
   }
 
+  @Override
   public java.net.URLStreamHandler createURLStreamHandler(String protocol) {
     if (!protocols.containsKey(protocol)) {
       boolean known = true;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
index 5afa9e911d..24bff5f9cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobFilter.java
@@ -31,6 +31,7 @@
 @InterfaceStability.Evolving
 public class GlobFilter implements PathFilter {
   private final static PathFilter DEFAULT_FILTER = new PathFilter() {
+    @Override
       public boolean accept(Path file) {
         return true;
       }
@@ -75,6 +76,7 @@ boolean hasPattern() {
     return pattern.hasWildcard();
   }
 
+  @Override
   public boolean accept(Path path) {
     return pattern.matches(path.getName()) && userFilter.accept(path);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 8e03fc35a9..9504e1fda6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -106,6 +106,7 @@ public HarFileSystem(FileSystem fs) {
    * har:///archivepath. This assumes the underlying filesystem
    * to be used in case not specified.
   */
+  @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     // decode the name
     URI underLyingURI = decodeHarURI(name, conf);
@@ -247,6 +248,7 @@ private String decodeFileName(String fname)
   /**
    * return the top level archive.
   */
+  @Override
   public Path getWorkingDirectory() {
     return new Path(uri.toString());
   }
@@ -636,6 +638,7 @@ private HarStatus getFileHarStatus(Path f) throws IOException {
   /**
   * @return null since no checksum algorithm is implemented.
   */
+  @Override
   public FileChecksum getFileChecksum(Path f) {
     return null;
   }
@@ -668,6 +671,7 @@ public FSDataOutputStream create(Path f, int bufferSize)
     throw new IOException("Har: Create not allowed");
   }
 
+  @Override
   public FSDataOutputStream create(Path f,
       FsPermission permission,
       boolean overwrite,
@@ -735,10 +739,12 @@ public FileStatus[] listStatus(Path f) throws IOException {
   /**
    * return the top level archive path.
   */
+  @Override
   public Path getHomeDirectory() {
     return new Path(uri.toString());
   }
 
+  @Override
   public void setWorkingDirectory(Path newDir) {
     //does nothing.
   }
@@ -746,6 +752,7 @@ public void setWorkingDirectory(Path newDir) {
   /**
    * not implemented.
   */
+  @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     throw new IOException("Har: mkdirs not allowed");
   }
@@ -753,6 +760,7 @@ public boolean mkdirs(Path f, FsPermission permission) throws IOException {
   /**
    * not implemented.
   */
+  @Override
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws
         IOException {
     throw new IOException("Har: copyfromlocalfile not allowed");
@@ -761,6 +769,7 @@ public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws
   /**
    * copies the file in the har filesystem to a local file.
   */
+  @Override
   public void copyToLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
     FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
@@ -769,6 +778,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst)
   /**
    * not implemented.
*/ + @Override public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException { throw new IOException("Har: startLocalOutput not allowed"); @@ -777,6 +787,7 @@ public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) /** * not implemented. */ + @Override public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException { throw new IOException("Har: completeLocalOutput not allowed"); @@ -785,6 +796,7 @@ public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) /** * not implemented. */ + @Override public void setOwner(Path p, String username, String groupname) throws IOException { throw new IOException("Har: setowner not allowed"); @@ -793,6 +805,7 @@ public void setOwner(Path p, String username, String groupname) /** * Not implemented. */ + @Override public void setPermission(Path p, FsPermission permisssion) throws IOException { throw new IOException("Har: setPermission not allowed"); @@ -825,6 +838,7 @@ private static class HarFsInputStream extends FSInputStream { this.end = start + length; } + @Override public synchronized int available() throws IOException { long remaining = end - underLyingStream.getPos(); if (remaining > (long)Integer.MAX_VALUE) { @@ -833,6 +847,7 @@ public synchronized int available() throws IOException { return (int) remaining; } + @Override public synchronized void close() throws IOException { underLyingStream.close(); super.close(); @@ -847,15 +862,18 @@ public void mark(int readLimit) { /** * reset is not implemented */ + @Override public void reset() throws IOException { throw new IOException("reset not implemented."); } + @Override public synchronized int read() throws IOException { int ret = read(oneBytebuff, 0, 1); return (ret <= 0) ? -1: (oneBytebuff[0] & 0xff); } + @Override public synchronized int read(byte[] b) throws IOException { int ret = read(b, 0, b.length); if (ret != -1) { @@ -867,6 +885,7 @@ public synchronized int read(byte[] b) throws IOException { /** * */ + @Override public synchronized int read(byte[] b, int offset, int len) throws IOException { int newlen = len; @@ -882,6 +901,7 @@ public synchronized int read(byte[] b, int offset, int len) return ret; } + @Override public synchronized long skip(long n) throws IOException { long tmpN = n; if (tmpN > 0) { @@ -895,10 +915,12 @@ public synchronized long skip(long n) throws IOException { return (tmpN < 0)? -1 : 0; } + @Override public synchronized long getPos() throws IOException { return (position - start); } + @Override public synchronized void seek(long pos) throws IOException { if (pos < 0 || (start + pos > end)) { throw new IOException("Failed to seek: EOF"); @@ -907,6 +929,7 @@ public synchronized void seek(long pos) throws IOException { underLyingStream.seek(position); } + @Override public boolean seekToNewSource(long targetPos) throws IOException { //do not need to implement this // hdfs in itself does seektonewsource @@ -917,6 +940,7 @@ public boolean seekToNewSource(long targetPos) throws IOException { /** * implementing position readable. */ + @Override public int read(long pos, byte[] b, int offset, int length) throws IOException { int nlength = length; @@ -929,6 +953,7 @@ public int read(long pos, byte[] b, int offset, int length) /** * position readable again. 
*/ + @Override public void readFully(long pos, byte[] b, int offset, int length) throws IOException { if (start + length + pos > end) { @@ -937,6 +962,7 @@ public void readFully(long pos, byte[] b, int offset, int length) underLyingStream.readFully(pos + start, b, offset, length); } + @Override public void readFully(long pos, byte[] b) throws IOException { readFully(pos, b, 0, b.length); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java index 394c01f705..7db348c557 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java @@ -91,6 +91,7 @@ public void copyToLocalFile(boolean delSrc, Path src, Path dst) * Moves files to a bad file directory on the same device, so that their * storage will not be reused. */ + @Override public boolean reportChecksumFailure(Path p, FSDataInputStream in, long inPos, FSDataInputStream sums, long sumsPos) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java index b0779ed82f..01368944a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java @@ -94,6 +94,7 @@ public BlockLocation[] getBlockLocations() { * @throws ClassCastException if the specified object's is not of * type FileStatus */ + @Override public int compareTo(Object o) { return super.compareTo(o); } @@ -102,6 +103,7 @@ public int compareTo(Object o) { * @param o the object to be compared. * @return true if two file status has the same path name; false if not. */ + @Override public boolean equals(Object o) { return super.equals(o); } @@ -112,6 +114,7 @@ public boolean equals(Object o) { * * @return a hash code value for the path name. 
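
LocatedFileStatus, above, inherits path-based equality and gets @Override on equals(Object) and hashCode(). The annotation matters most on equals, because the commonest mistake is writing an overload that takes the concrete type; hash-based collections call equals(Object) and never see it. Illustrative only:

// The classic equals trap the annotation prevents; Key is a made-up class.
class Key {
  private final String name;
  Key(String name) { this.name = name; }

  // Overload, not an override: HashMap/HashSet dispatch on equals(Object)
  // and silently ignore this method.
  public boolean equals(Key other) { return name.equals(other.name); }

  // The real override must take Object; @Override enforces that signature.
  @Override
  public boolean equals(Object o) {
    return o instanceof Key && name.equals(((Key) o).name);
  }

  @Override
  public int hashCode() { return name.hashCode(); }
}
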
*/ + @Override public int hashCode() { return super.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java index 1c697b7f52..5bddb96f0c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java @@ -57,7 +57,7 @@ public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) { this.md5 = md5; } - /** {@inheritDoc} */ + @Override public String getAlgorithmName() { return "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC + getCrcType().name(); @@ -73,11 +73,11 @@ public static DataChecksum.Type getCrcTypeFromAlgorithmName(String algorithm) throw new IOException("Unknown checksum type in " + algorithm); } - - /** {@inheritDoc} */ + + @Override public int getLength() {return LENGTH;} - - /** {@inheritDoc} */ + + @Override public byte[] getBytes() { return WritableUtils.toByteArray(this); } @@ -92,14 +92,14 @@ public ChecksumOpt getChecksumOpt() { return new ChecksumOpt(getCrcType(), bytesPerCRC); } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) throws IOException { bytesPerCRC = in.readInt(); crcPerBlock = in.readLong(); md5 = MD5Hash.read(in); } - - /** {@inheritDoc} */ + + @Override public void write(DataOutput out) throws IOException { out.writeInt(bytesPerCRC); out.writeLong(crcPerBlock); @@ -161,8 +161,8 @@ public static MD5MD5CRC32FileChecksum valueOf(Attributes attrs + ", md5=" + md5, e); } } - - /** {@inheritDoc} */ + + @Override public String toString() { return getAlgorithmName() + ":" + md5; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java index 173e16ea41..8464e51270 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java @@ -22,7 +22,6 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.Progressable; -import org.apache.hadoop.HadoopIllegalArgumentException; /** * This class contains options related to file system operations. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java index 74c85af48b..c0ebebfe67 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java @@ -261,6 +261,7 @@ public Path suffix(String suffix) { return new Path(getParent(), getName()+suffix); } + @Override public String toString() { // we can't use uri.toString(), which escapes everything, because we want // illegal characters unescaped in the string, for glob processing, etc. 
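
Path's toString(), equals(), hashCode() and compareTo(), annotated above, all reduce to the wrapped java.net.URI. One practical consequence, sketched below with a made-up namenode authority, is that two Paths naming the same file through different URIs are not equal, and ordering is URI ordering rather than file-name ordering:

import org.apache.hadoop.fs.Path;

// Hedged demonstration; "nn:8020" is a hypothetical namenode address.
public class PathIdentity {
  public static void main(String[] args) {
    Path relative = new Path("/user/alice");
    Path qualified = new Path("hdfs://nn:8020/user/alice");
    System.out.println(relative.equals(qualified));    // false: the URIs differ
    System.out.println(relative.compareTo(qualified)); // URI order, not name order
  }
}
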
@@ -289,6 +290,7 @@ public String toString() { return buffer.toString(); } + @Override public boolean equals(Object o) { if (!(o instanceof Path)) { return false; @@ -297,10 +299,12 @@ public boolean equals(Object o) { return this.uri.equals(that.uri); } + @Override public int hashCode() { return uri.hashCode(); } + @Override public int compareTo(Object o) { Path that = (Path)o; return this.uri.compareTo(that.uri); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java index 38e991480a..b33b1a778f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java @@ -72,8 +72,10 @@ public File pathToFile(Path path) { return new File(path.toUri().getPath()); } + @Override public URI getUri() { return NAME; } + @Override public void initialize(URI uri, Configuration conf) throws IOException { super.initialize(uri, conf); setConf(conf); @@ -84,6 +86,7 @@ public TrackingFileInputStream(File f) throws IOException { super(f); } + @Override public int read() throws IOException { int result = super.read(); if (result != -1) { @@ -92,6 +95,7 @@ public int read() throws IOException { return result; } + @Override public int read(byte[] data) throws IOException { int result = super.read(data); if (result != -1) { @@ -100,6 +104,7 @@ public int read(byte[] data) throws IOException { return result; } + @Override public int read(byte[] data, int offset, int length) throws IOException { int result = super.read(data, offset, length); if (result != -1) { @@ -120,15 +125,18 @@ public LocalFSFileInputStream(Path f) throws IOException { this.fis = new TrackingFileInputStream(pathToFile(f)); } + @Override public void seek(long pos) throws IOException { fis.getChannel().position(pos); this.position = pos; } + @Override public long getPos() throws IOException { return this.position; } + @Override public boolean seekToNewSource(long targetPos) throws IOException { return false; } @@ -136,11 +144,14 @@ public boolean seekToNewSource(long targetPos) throws IOException { /* * Just forward to the fis */ + @Override public int available() throws IOException { return fis.available(); } + @Override public void close() throws IOException { fis.close(); } @Override public boolean markSupported() { return false; } + @Override public int read() throws IOException { try { int value = fis.read(); @@ -153,6 +164,7 @@ public int read() throws IOException { } } + @Override public int read(byte[] b, int off, int len) throws IOException { try { int value = fis.read(b, off, len); @@ -165,6 +177,7 @@ public int read(byte[] b, int off, int len) throws IOException { } } + @Override public int read(long position, byte[] b, int off, int len) throws IOException { ByteBuffer bb = ByteBuffer.wrap(b, off, len); @@ -175,6 +188,7 @@ public int read(long position, byte[] b, int off, int len) } } + @Override public long skip(long n) throws IOException { long value = fis.skip(n); if (value > 0) { @@ -189,6 +203,7 @@ public FileDescriptor getFileDescriptor() throws IOException { } } + @Override public FSDataInputStream open(Path f, int bufferSize) throws IOException { if (!exists(f)) { throw new FileNotFoundException(f.toString()); @@ -210,8 +225,11 @@ private LocalFSFileOutputStream(Path f, boolean append) throws IOException { /* * Just forward to the fos */ + 
@Override public void close() throws IOException { fos.close(); } + @Override public void flush() throws IOException { fos.flush(); } + @Override public void write(byte[] b, int off, int len) throws IOException { try { fos.write(b, off, len); @@ -220,6 +238,7 @@ public void write(byte[] b, int off, int len) throws IOException { } } + @Override public void write(int b) throws IOException { try { fos.write(b); @@ -229,7 +248,7 @@ public void write(int b) throws IOException { } } - /** {@inheritDoc} */ + @Override public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { if (!exists(f)) { @@ -242,7 +261,6 @@ public FSDataOutputStream append(Path f, int bufferSize, new LocalFSFileOutputStream(f, true), bufferSize), statistics); } - /** {@inheritDoc} */ @Override public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) @@ -264,7 +282,6 @@ private FSDataOutputStream create(Path f, boolean overwrite, new LocalFSFileOutputStream(f, false), bufferSize), statistics); } - /** {@inheritDoc} */ @Override public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, @@ -276,7 +293,6 @@ public FSDataOutputStream create(Path f, FsPermission permission, return out; } - /** {@inheritDoc} */ @Override public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, @@ -288,6 +304,7 @@ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, return out; } + @Override public boolean rename(Path src, Path dst) throws IOException { if (pathToFile(src).renameTo(pathToFile(dst))) { return true; @@ -302,6 +319,7 @@ public boolean rename(Path src, Path dst) throws IOException { * @return true if the file or directory and all its contents were deleted * @throws IOException if p is non-empty and recursive is false */ + @Override public boolean delete(Path p, boolean recursive) throws IOException { File f = pathToFile(p); if (f.isFile()) { @@ -319,6 +337,7 @@ public boolean delete(Path p, boolean recursive) throws IOException { * (Note: Returned list is not sorted in any given order, * due to reliance on Java's {@link File#list()} API.) */ + @Override public FileStatus[] listStatus(Path f) throws IOException { File localf = pathToFile(f); FileStatus[] results; @@ -356,6 +375,7 @@ public FileStatus[] listStatus(Path f) throws IOException { * Creates the specified directory hierarchy. Does not * treat existence as an error. */ + @Override public boolean mkdirs(Path f) throws IOException { if(f == null) { throw new IllegalArgumentException("mkdirs path arg is null"); @@ -373,7 +393,6 @@ public boolean mkdirs(Path f) throws IOException { (p2f.mkdir() || p2f.isDirectory()); } - /** {@inheritDoc} */ @Override public boolean mkdirs(Path f, FsPermission permission) throws IOException { boolean b = mkdirs(f); @@ -418,7 +437,6 @@ protected Path getInitialWorkingDirectory() { return this.makeQualified(new Path(System.getProperty("user.dir"))); } - /** {@inheritDoc} */ @Override public FsStatus getStatus(Path p) throws IOException { File partition = pathToFile(p == null ? new Path("/") : p); @@ -430,29 +448,35 @@ public FsStatus getStatus(Path p) throws IOException { } // In the case of the local filesystem, we can just rename the file. 
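
RawLocalFileSystem's TrackingFileInputStream, patched above, exists to count bytes into FileSystem.Statistics, and it must override every read variant separately because FileInputStream's bulk reads do not funnel through read(). A reduced sketch of the pattern, with a plain long counter standing in for the statistics object:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

// Counting wrapper in the style of TrackingFileInputStream; bytesRead is a
// stand-in for FileSystem.Statistics, which the real class updates instead.
class CountingFileInputStream extends FileInputStream {
  private long bytesRead;

  CountingFileInputStream(File f) throws IOException { super(f); }

  @Override
  public int read() throws IOException {
    int result = super.read();
    if (result != -1) bytesRead++;           // returns the byte itself, so count 1
    return result;
  }

  @Override
  public int read(byte[] data, int off, int len) throws IOException {
    int result = super.read(data, off, len);
    if (result != -1) bytesRead += result;   // bulk reads return the byte count
    return result;
  }
  // The real class also overrides read(byte[]) for the same reason: none of
  // FileInputStream's native read paths delegate to the others.
}
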
+ @Override public void moveFromLocalFile(Path src, Path dst) throws IOException { rename(src, dst); } // We can write output directly to the final location + @Override public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException { return fsOutputFile; } // It's in the right place - nothing to do. + @Override public void completeLocalOutput(Path fsWorkingFile, Path tmpLocalFile) throws IOException { } + @Override public void close() throws IOException { super.close(); } + @Override public String toString() { return "LocalFS"; } + @Override public FileStatus getFileStatus(Path f) throws IOException { File path = pathToFile(f); if (path.exists()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java index 07870df1a6..1820c6619e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java @@ -263,6 +263,7 @@ private class Emptier implements Runnable { } } + @Override public void run() { if (emptierInterval == 0) return; // trash disabled diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 1c19ce27fb..99ca4fbb80 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -262,6 +262,7 @@ public void close() throws IOException { } /** This optional operation is not yet supported. */ + @Override public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { throw new IOException("Not supported"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java index d3ac019a94..beea508d5d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java @@ -51,19 +51,23 @@ public FTPInputStream(InputStream stream, FTPClient client, this.closed = false; } + @Override public long getPos() throws IOException { return pos; } // We don't support seek. + @Override public void seek(long pos) throws IOException { throw new IOException("Seek not supported"); } + @Override public boolean seekToNewSource(long targetPos) throws IOException { throw new IOException("Seek not supported"); } + @Override public synchronized int read() throws IOException { if (closed) { throw new IOException("Stream closed"); @@ -79,6 +83,7 @@ public synchronized int read() throws IOException { return byteRead; } + @Override public synchronized int read(byte buf[], int off, int len) throws IOException { if (closed) { throw new IOException("Stream closed"); @@ -95,6 +100,7 @@ public synchronized int read(byte buf[], int off, int len) throws IOException { return result; } + @Override public synchronized void close() throws IOException { if (closed) { throw new IOException("Stream closed"); @@ -116,14 +122,17 @@ public synchronized void close() throws IOException { // Not supported. 
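
FTPInputStream, above, declares markSupported() false and throws from reset(), which is exactly what the java.io contract allows: reset() is only guaranteed after mark() on a stream that advertises support. A defensive caller probes first; peek() below is a hypothetical helper, not part of the patch:

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

public class StreamPeek {
  // Read up to n bytes without consuming them, if and only if the stream can rewind.
  static byte[] peek(InputStream in, int n) throws IOException {
    if (!in.markSupported()) {
      throw new IOException("stream cannot be rewound");
    }
    in.mark(n);
    byte[] head = new byte[n];
    int read = in.read(head, 0, n);
    in.reset();                               // replay the bytes we just consumed
    return Arrays.copyOf(head, Math.max(read, 0));
  }
}
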
+ @Override public boolean markSupported() { return false; } + @Override public void mark(int readLimit) { // Do nothing } + @Override public void reset() throws IOException { throw new IOException("Mark not supported"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java index 88b28ed434..0d77a78c87 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSImpl.java @@ -50,22 +50,27 @@ public KFSImpl(String metaServerHost, int metaServerPort, statistics = stats; } + @Override public boolean exists(String path) throws IOException { return kfsAccess.kfs_exists(path); } + @Override public boolean isDirectory(String path) throws IOException { return kfsAccess.kfs_isDirectory(path); } + @Override public boolean isFile(String path) throws IOException { return kfsAccess.kfs_isFile(path); } + @Override public String[] readdir(String path) throws IOException { return kfsAccess.kfs_readdir(path); } + @Override public FileStatus[] readdirplus(Path path) throws IOException { String srep = path.toUri().getPath(); KfsFileAttr[] fattr = kfsAccess.kfs_readdirplus(srep); @@ -100,52 +105,64 @@ public FileStatus[] readdirplus(Path path) throws IOException { } + @Override public int mkdirs(String path) throws IOException { return kfsAccess.kfs_mkdirs(path); } + @Override public int rename(String source, String dest) throws IOException { return kfsAccess.kfs_rename(source, dest); } + @Override public int rmdir(String path) throws IOException { return kfsAccess.kfs_rmdir(path); } + @Override public int remove(String path) throws IOException { return kfsAccess.kfs_remove(path); } + @Override public long filesize(String path) throws IOException { return kfsAccess.kfs_filesize(path); } + @Override public short getReplication(String path) throws IOException { return kfsAccess.kfs_getReplication(path); } + @Override public short setReplication(String path, short replication) throws IOException { return kfsAccess.kfs_setReplication(path, replication); } + @Override public String[][] getDataLocation(String path, long start, long len) throws IOException { return kfsAccess.kfs_getDataLocation(path, start, len); } + @Override public long getModificationTime(String path) throws IOException { return kfsAccess.kfs_getModificationTime(path); } + @Override public FSDataInputStream open(String path, int bufferSize) throws IOException { return new FSDataInputStream(new KFSInputStream(kfsAccess, path, statistics)); } + @Override public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException { return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, replication, false, progress), statistics); } + @Override public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException { // when opening for append, # of replicas is ignored return new FSDataOutputStream(new KFSOutputStream(kfsAccess, path, (short) 1, true, progress), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java index 04c937b848..492230f064 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSInputStream.java @@ -53,6 +53,7 @@ public KFSInputStream(KfsAccess kfsAccess, String path, this.fsize = 0; } + @Override public long getPos() throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -60,6 +61,7 @@ public long getPos() throws IOException { return kfsChannel.tell(); } + @Override public synchronized int available() throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -67,6 +69,7 @@ public synchronized int available() throws IOException { return (int) (this.fsize - getPos()); } + @Override public synchronized void seek(long targetPos) throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -74,10 +77,12 @@ public synchronized void seek(long targetPos) throws IOException { kfsChannel.seek(targetPos); } + @Override public synchronized boolean seekToNewSource(long targetPos) throws IOException { return false; } + @Override public synchronized int read() throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -93,6 +98,7 @@ public synchronized int read() throws IOException { return -1; } + @Override public synchronized int read(byte b[], int off, int len) throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -109,6 +115,7 @@ public synchronized int read(byte b[], int off, int len) throws IOException { return res; } + @Override public synchronized void close() throws IOException { if (kfsChannel == null) { return; @@ -118,14 +125,17 @@ public synchronized void close() throws IOException { kfsChannel = null; } + @Override public boolean markSupported() { return false; } + @Override public void mark(int readLimit) { // Do nothing } + @Override public void reset() throws IOException { throw new IOException("Mark not supported"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java index 59cea357e6..a50f750733 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/kfs/KFSOutputStream.java @@ -20,15 +20,10 @@ package org.apache.hadoop.fs.kfs; import java.io.*; -import java.net.*; -import java.util.*; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.util.Progressable; import org.kosmix.kosmosfs.access.KfsAccess; @@ -60,6 +55,7 @@ public long getPos() throws IOException { return kfsChannel.tell(); } + @Override public void write(int v) throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -70,6 +66,7 @@ public void write(int v) throws IOException { write(b, 0, 1); } + @Override public void write(byte b[], int off, int len) throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -80,6 +77,7 @@ public void write(byte b[], int off, int len) throws IOException { kfsChannel.write(ByteBuffer.wrap(b, off, len)); } + @Override public void flush() throws IOException { if (kfsChannel == null) { throw new IOException("File closed"); @@ -89,6 +87,7 @@ public void 
flush() throws IOException { kfsChannel.sync(); } + @Override public synchronized void close() throws IOException { if (kfsChannel == null) { return; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java index af3d5148d5..972a410b53 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java @@ -40,6 +40,7 @@ public class FsPermission implements Writable { private static final Log LOG = LogFactory.getLog(FsPermission.class); static final WritableFactory FACTORY = new WritableFactory() { + @Override public Writable newInstance() { return new FsPermission(); } }; static { // register a ctor @@ -124,12 +125,12 @@ public void fromShort(short n) { set(v[(n >>> 6) & 7], v[(n >>> 3) & 7], v[n & 7], (((n >>> 9) & 1) == 1) ); } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { out.writeShort(toShort()); } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) throws IOException { fromShort(in.readShort()); } @@ -155,7 +156,7 @@ public short toShort() { return (short)s; } - /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { if (obj instanceof FsPermission) { FsPermission that = (FsPermission)obj; @@ -167,10 +168,10 @@ public boolean equals(Object obj) { return false; } - /** {@inheritDoc} */ + @Override public int hashCode() {return toShort();} - /** {@inheritDoc} */ + @Override public String toString() { String str = useraction.SYMBOL + groupaction.SYMBOL + otheraction.SYMBOL; if(stickyBit) { @@ -300,9 +301,11 @@ private static class ImmutableFsPermission extends FsPermission { public ImmutableFsPermission(short permission) { super(permission); } + @Override public FsPermission applyUMask(FsPermission umask) { throw new UnsupportedOperationException(); } + @Override public void readFields(DataInput in) throws IOException { throw new UnsupportedOperationException(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java index f47226f1e2..bc9e392a87 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java @@ -32,6 +32,7 @@ @InterfaceStability.Unstable public class PermissionStatus implements Writable { static final WritableFactory FACTORY = new WritableFactory() { + @Override public Writable newInstance() { return new PermissionStatus(); } }; static { // register a ctor @@ -42,9 +43,11 @@ public class PermissionStatus implements Writable { public static PermissionStatus createImmutable( String user, String group, FsPermission permission) { return new PermissionStatus(user, group, permission) { + @Override public PermissionStatus applyUMask(FsPermission umask) { throw new UnsupportedOperationException(); } + @Override public void readFields(DataInput in) throws IOException { throw new UnsupportedOperationException(); } @@ -82,14 +85,14 @@ public PermissionStatus applyUMask(FsPermission umask) { return this; } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) 
throws IOException { username = Text.readString(in, Text.DEFAULT_MAX_LEN); groupname = Text.readString(in, Text.DEFAULT_MAX_LEN); permission = FsPermission.read(in); } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { write(out, username, groupname, permission); } @@ -115,7 +118,7 @@ public static void write(DataOutput out, permission.write(out); } - /** {@inheritDoc} */ + @Override public String toString() { return username + ":" + groupname + ":" + permission; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java index 6667d62189..4adc306633 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java @@ -83,6 +83,7 @@ class Jets3tFileSystemStore implements FileSystemStore { private static final Log LOG = LogFactory.getLog(Jets3tFileSystemStore.class.getName()); + @Override public void initialize(URI uri, Configuration conf) throws IOException { this.conf = conf; @@ -108,6 +109,7 @@ public void initialize(URI uri, Configuration conf) throws IOException { ); } + @Override public String getVersion() throws IOException { return FILE_SYSTEM_VERSION_VALUE; } @@ -123,14 +125,17 @@ private void delete(String key) throws IOException { } } + @Override public void deleteINode(Path path) throws IOException { delete(pathToKey(path)); } + @Override public void deleteBlock(Block block) throws IOException { delete(blockToKey(block)); } + @Override public boolean inodeExists(Path path) throws IOException { InputStream in = get(pathToKey(path), true); if (in == null) { @@ -140,6 +145,7 @@ public boolean inodeExists(Path path) throws IOException { return true; } + @Override public boolean blockExists(long blockId) throws IOException { InputStream in = get(blockToKey(blockId), false); if (in == null) { @@ -203,10 +209,12 @@ private void checkMetadata(S3Object object) throws S3FileSystemException, } } + @Override public INode retrieveINode(Path path) throws IOException { return INode.deserialize(get(pathToKey(path), true)); } + @Override public File retrieveBlock(Block block, long byteRangeStart) throws IOException { File fileBlock = null; @@ -249,6 +257,7 @@ private File newBackupFile() throws IOException { return result; } + @Override public Set listSubPaths(Path path) throws IOException { try { String prefix = pathToKey(path); @@ -270,6 +279,7 @@ public Set listSubPaths(Path path) throws IOException { } } + @Override public Set listDeepSubPaths(Path path) throws IOException { try { String prefix = pathToKey(path); @@ -311,10 +321,12 @@ private void put(String key, InputStream in, long length, boolean storeMetadata) } } + @Override public void storeINode(Path path, INode inode) throws IOException { put(pathToKey(path), inode.serialize(), inode.getSerializedLength(), true); } + @Override public void storeBlock(Block block, File file) throws IOException { BufferedInputStream in = null; try { @@ -354,6 +366,7 @@ private String blockToKey(Block block) { return blockToKey(block.getId()); } + @Override public void purge() throws IOException { try { S3Object[] objects = s3Service.listObjects(bucket); @@ -368,6 +381,7 @@ public void purge() throws IOException { } } + @Override public void dump() throws IOException { StringBuilder sb = new 
StringBuilder("S3 Filesystem, "); sb.append(bucket.getName()).append("\n"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java index f82755781e..416bfb17c4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java @@ -61,6 +61,7 @@ public static void main(String[] args) throws Exception { System.exit(res); } + @Override public int run(String[] args) throws Exception { if (args.length == 0) { @@ -195,6 +196,7 @@ interface Store { class UnversionedStore implements Store { + @Override public Set listAllPaths() throws IOException { try { String prefix = urlEncode(Path.SEPARATOR); @@ -212,6 +214,7 @@ public Set listAllPaths() throws IOException { } } + @Override public void deleteINode(Path path) throws IOException { delete(pathToKey(path)); } @@ -227,6 +230,7 @@ private void delete(String key) throws IOException { } } + @Override public INode retrieveINode(Path path) throws IOException { return INode.deserialize(get(pathToKey(path))); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java index 5a5d628adb..81ef31446e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java @@ -206,6 +206,7 @@ public FileStatus[] listStatus(Path f) throws IOException { } /** This optional operation is not yet supported. 
*/ + @Override public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { throw new IOException("Not supported"); @@ -298,6 +299,7 @@ private boolean renameRecursive(Path src, Path dst) throws IOException { return true; } + @Override public boolean delete(Path path, boolean recursive) throws IOException { Path absolutePath = makeAbsolute(path); INode inode = store.retrieveINode(absolutePath); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java index c2293ba682..400419c110 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java @@ -49,6 +49,7 @@ class Jets3tNativeFileSystemStore implements NativeFileSystemStore { private S3Service s3Service; private S3Bucket bucket; + @Override public void initialize(URI uri, Configuration conf) throws IOException { S3Credentials s3Credentials = new S3Credentials(); s3Credentials.initialize(uri, conf); @@ -63,6 +64,7 @@ public void initialize(URI uri, Configuration conf) throws IOException { bucket = new S3Bucket(uri.getHost()); } + @Override public void storeFile(String key, File file, byte[] md5Hash) throws IOException { @@ -90,6 +92,7 @@ public void storeFile(String key, File file, byte[] md5Hash) } } + @Override public void storeEmptyFile(String key) throws IOException { try { S3Object object = new S3Object(key); @@ -102,6 +105,7 @@ public void storeEmptyFile(String key) throws IOException { } } + @Override public FileMetadata retrieveMetadata(String key) throws IOException { try { S3Object object = s3Service.getObjectDetails(bucket, key); @@ -117,6 +121,7 @@ public FileMetadata retrieveMetadata(String key) throws IOException { } } + @Override public InputStream retrieve(String key) throws IOException { try { S3Object object = s3Service.getObject(bucket, key); @@ -127,6 +132,7 @@ public InputStream retrieve(String key) throws IOException { } } + @Override public InputStream retrieve(String key, long byteRangeStart) throws IOException { try { @@ -139,11 +145,13 @@ public InputStream retrieve(String key, long byteRangeStart) } } + @Override public PartialListing list(String prefix, int maxListingLength) throws IOException { return list(prefix, maxListingLength, null, false); } + @Override public PartialListing list(String prefix, int maxListingLength, String priorLastKey, boolean recurse) throws IOException { @@ -175,6 +183,7 @@ private PartialListing list(String prefix, String delimiter, } } + @Override public void delete(String key) throws IOException { try { s3Service.deleteObject(bucket, key); @@ -183,6 +192,7 @@ public void delete(String key) throws IOException { } } + @Override public void copy(String srcKey, String dstKey) throws IOException { try { s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(), @@ -192,6 +202,7 @@ public void copy(String srcKey, String dstKey) throws IOException { } } + @Override public void purge(String prefix) throws IOException { try { S3Object[] objects = s3Service.listObjects(bucket, prefix, null); @@ -203,6 +214,7 @@ public void purge(String prefix) throws IOException { } } + @Override public void dump() throws IOException { StringBuilder sb = new StringBuilder("S3 Native Filesystem, "); 
sb.append(bucket.getName()).append("\n"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java index eea429a97e..e1aeea94ac 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java @@ -150,6 +150,7 @@ protected IllegalNumberOfArgumentsException(int want, int got) { actual = got; } + @Override public String getMessage() { return "expected " + expected + " but got " + actual; } @@ -165,6 +166,7 @@ public TooManyArgumentsException(int expected, int actual) { super(expected, actual); } + @Override public String getMessage() { return "Too many arguments: " + super.getMessage(); } @@ -180,6 +182,7 @@ public NotEnoughArgumentsException(int expected, int actual) { super(expected, actual); } + @Override public String getMessage() { return "Not enough arguments: " + super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java index 71bfc9510d..bc1d8af951 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java @@ -114,6 +114,7 @@ private boolean moveToTrash(PathData item) throws IOException { static class Rmr extends Rm { public static final String NAME = "rmr"; + @Override protected void processOptions(LinkedList args) throws IOException { args.addFirst("-r"); super.processOptions(args); @@ -136,6 +137,7 @@ static class Rmdir extends FsCommand { private boolean ignoreNonEmpty = false; + @Override protected void processOptions(LinkedList args) throws IOException { CommandFormat cf = new CommandFormat( 1, Integer.MAX_VALUE, "-ignore-fail-on-non-empty"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java index 5ae0d67c57..8d598012ec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java @@ -161,6 +161,7 @@ public TextRecordInputStream(FileStatus f) throws IOException { outbuf = new DataOutputBuffer(); } + @Override public int read() throws IOException { int ret; if (null == inbuf || -1 == (ret = inbuf.read())) { @@ -180,6 +181,7 @@ public int read() throws IOException { return ret; } + @Override public void close() throws IOException { r.close(); super.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java index 3f397327de..2541be393b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java @@ -73,6 +73,7 @@ public String getCommandName() { // abstract method that normally is invoked by runall() which is // overridden below + @Override protected void run(Path path) throws IOException { throw new RuntimeException("not supposed to get 
here"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java index b53d2820de..04574cf673 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java @@ -380,6 +380,7 @@ private static int findLongestDirPrefix(String cwd, String path, boolean isDir) * as given on the commandline, or the full path * @return String of the path */ + @Override public String toString() { String scheme = uri.getScheme(); // No interpretation of symbols. Just decode % escaped chars. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java index 85426fa4ff..95d0a2d456 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java @@ -102,6 +102,7 @@ public ChRootedFileSystem(final URI uri, Configuration conf) * for this FileSystem * @param conf the configuration */ + @Override public void initialize(final URI name, final Configuration conf) throws IOException { super.initialize(name, conf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java index f92108cfe7..143ce68ebb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/NotInMountpointException.java @@ -20,10 +20,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; - import org.apache.hadoop.fs.Path; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java index 1c0c8dac4d..6031daf118 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java @@ -164,6 +164,7 @@ public String getScheme() { * this FileSystem * @param conf the configuration */ + @Override public void initialize(final URI theUri, final Configuration conf) throws IOException { super.initialize(theUri, conf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java index 871e3d8a63..e0f62e453b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsFileStatus.java @@ -42,7 +42,8 @@ public boolean equals(Object o) { return super.equals(o); } - public int hashCode() { + @Override + public int hashCode() { return 
super.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index a4ed255deb..5287581073 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -892,6 +892,7 @@ private String createWithRetries(final String path, final byte[] data, final List acl, final CreateMode mode) throws InterruptedException, KeeperException { return zkDoWithRetries(new ZKAction() { + @Override public String run() throws KeeperException, InterruptedException { return zkClient.create(path, data, acl, mode); } @@ -901,6 +902,7 @@ public String run() throws KeeperException, InterruptedException { private byte[] getDataWithRetries(final String path, final boolean watch, final Stat stat) throws InterruptedException, KeeperException { return zkDoWithRetries(new ZKAction() { + @Override public byte[] run() throws KeeperException, InterruptedException { return zkClient.getData(path, watch, stat); } @@ -910,6 +912,7 @@ public byte[] run() throws KeeperException, InterruptedException { private Stat setDataWithRetries(final String path, final byte[] data, final int version) throws InterruptedException, KeeperException { return zkDoWithRetries(new ZKAction() { + @Override public Stat run() throws KeeperException, InterruptedException { return zkClient.setData(path, data, version); } @@ -919,6 +922,7 @@ public Stat run() throws KeeperException, InterruptedException { private void deleteWithRetries(final String path, final int version) throws KeeperException, InterruptedException { zkDoWithRetries(new ZKAction() { + @Override public Void run() throws KeeperException, InterruptedException { zkClient.delete(path, version); return null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java index d4ae0899fb..85912c7c76 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java @@ -56,6 +56,7 @@ public enum HAServiceState { this.name = name; } + @Override public String toString() { return name; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java index 06fb648f42..4898b38726 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java @@ -184,6 +184,7 @@ private FenceMethodWithArg(FenceMethod method, String arg) { this.arg = arg; } + @Override public String toString() { return method.getClass().getCanonicalName() + "(" + arg + ")"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java index 537fba942d..343693e95c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java @@ -274,6 +274,7 @@ private static class LogAdapter implements com.jcraft.jsch.Logger { static final Log LOG = LogFactory.getLog( SshFenceByTcpPort.class.getName() + ".jsch"); + @Override public boolean isEnabled(int level) { switch (level) { case com.jcraft.jsch.Logger.DEBUG: @@ -291,6 +292,7 @@ public boolean isEnabled(int level) { } } + @Override public void log(int level, String message) { switch (level) { case com.jcraft.jsch.Logger.DEBUG: diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java index 7bf3c16e8c..77e9e1601a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java @@ -474,7 +474,7 @@ public void addInternalServlet(String name, String pathSpec, } } - /** {@inheritDoc} */ + @Override public void addFilter(String name, String classname, Map parameters) { @@ -494,7 +494,7 @@ public void addFilter(String name, String classname, filterNames.add(name); } - /** {@inheritDoc} */ + @Override public void addGlobalFilter(String name, String classname, Map parameters) { final String[] ALL_URLS = { "/*" }; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java index bb2f163fe4..6bd9efc689 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java @@ -164,16 +164,18 @@ protected AbstractMapWritable() { } /** @return the conf */ + @Override public Configuration getConf() { return conf.get(); } /** @param conf the conf to set */ + @Override public void setConf(Configuration conf) { this.conf.set(conf); } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { // First write out the size of the class table and any classes that are @@ -187,7 +189,7 @@ public void write(DataOutput out) throws IOException { } } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) throws IOException { // Get the number of "unknown" classes diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java index 875d6efdc2..122aa5ca1e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java @@ -88,6 +88,7 @@ public Object toArray() { public Writable[] get() { return values; } + @Override public void readFields(DataInput in) throws IOException { values = new Writable[in.readInt()]; // construct values for (int i = 0; i < values.length; i++) { @@ -97,6 +98,7 @@ public void readFields(DataInput in) throws IOException { } } + @Override public void write(DataOutput out) throws IOException { out.writeInt(values.length); // write values for (int i = 0; i < values.length; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java index 71279b4f6d..0079079a79 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java @@ -57,12 +57,14 @@ public boolean get() { /** */ + @Override public void readFields(DataInput in) throws IOException { value = in.readBoolean(); } /** */ + @Override public void write(DataOutput out) throws IOException { out.writeBoolean(value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java index ff926c11c1..ffcdea2c9a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java @@ -39,10 +39,12 @@ public ByteWritable() {} /** Return the value of this ByteWritable. */ public byte get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = in.readByte(); } + @Override public void write(DataOutput out) throws IOException { out.writeByte(value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java index 012a3bc9d7..7e42a36cb7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java @@ -81,6 +81,7 @@ public byte[] copyBytes() { * if you need the returned array to be precisely the length of the data. * @return The data is only valid between 0 and getLength() - 1. */ + @Override public byte[] getBytes() { return bytes; } @@ -97,6 +98,7 @@ public byte[] get() { /** * Get the current size of the buffer. 
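
The primitive wrappers being annotated here (BooleanWritable, ByteWritable, BytesWritable, and the rest) all follow the same two-method contract: write(DataOutput) and readFields(DataInput) must serialize and deserialize the fields in the same order, because the wire format carries no field names. A minimal custom Writable in the same style, assuming a hypothetical two-int record:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// Hypothetical example type; the field order in write() and readFields() must match.
public class PointWritable implements Writable {
  private int x;
  private int y;

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(x);
    out.writeInt(y);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    x = in.readInt();
    y = in.readInt();
  }
}
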
*/ + @Override public int getLength() { return size; } @@ -171,6 +173,7 @@ public void set(byte[] newData, int offset, int length) { } // inherit javadoc + @Override public void readFields(DataInput in) throws IOException { setSize(0); // clear the old data setSize(in.readInt()); @@ -178,6 +181,7 @@ public void readFields(DataInput in) throws IOException { } // inherit javadoc + @Override public void write(DataOutput out) throws IOException { out.writeInt(size); out.write(bytes, 0, size); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java index ad3164b2d2..6550e1f2fd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java @@ -45,6 +45,7 @@ public abstract class CompressedWritable implements Writable { public CompressedWritable() {} + @Override public final void readFields(DataInput in) throws IOException { compressed = new byte[in.readInt()]; in.readFully(compressed, 0, compressed.length); @@ -70,6 +71,7 @@ protected void ensureInflated() { protected abstract void readFieldsCompressed(DataInput in) throws IOException; + @Override public final void write(DataOutput out) throws IOException { if (compressed == null) { ByteArrayOutputStream deflated = new ByteArrayOutputStream(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java index 469d3ff863..2cd59d75dc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java @@ -21,8 +21,6 @@ import java.io.DataInputStream; import java.io.InputStream; import java.nio.ByteBuffer; -import java.util.LinkedList; -import java.util.List; public class DataInputByteBuffer extends DataInputStream { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java index 6cd1f49722..2b8e259464 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java @@ -72,6 +72,7 @@ public DefaultStringifier(Configuration conf, Class c) { } } + @Override public T fromString(String str) throws IOException { try { byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8")); @@ -83,6 +84,7 @@ public T fromString(String str) throws IOException { } } + @Override public String toString(T obj) throws IOException { outBuf.reset(); serializer.serialize(obj); @@ -91,6 +93,7 @@ public String toString(T obj) throws IOException { return new String(Base64.encodeBase64(buf)); } + @Override public void close() throws IOException { inBuf.close(); outBuf.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java index a984cd4ef5..5cc326fe3c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java @@ -42,10 +42,12 @@ public DoubleWritable(double value) { set(value); } + @Override public void readFields(DataInput in) throws IOException { value = in.readDouble(); } + @Override public void write(DataOutput out) throws IOException { out.writeDouble(value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java index c1ff1ca3bf..dc430cc29c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.util.EnumSet; import java.util.Iterator; -import java.util.Collection; import java.util.AbstractCollection; import org.apache.hadoop.classification.InterfaceAudience; @@ -46,8 +45,11 @@ public class EnumSetWritable> extends AbstractCollection EnumSetWritable() { } + @Override public Iterator iterator() { return value.iterator(); } + @Override public int size() { return value.size(); } + @Override public boolean add(E e) { if (value == null) { value = EnumSet.of(e); @@ -109,7 +111,7 @@ public EnumSet get() { return value; } - /** {@inheritDoc} */ + @Override @SuppressWarnings("unchecked") public void readFields(DataInput in) throws IOException { int length = in.readInt(); @@ -127,7 +129,7 @@ else if (length == 0) { } } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { if (this.value == null) { out.writeInt(-1); @@ -152,6 +154,7 @@ public void write(DataOutput out) throws IOException { * Returns true if o is an EnumSetWritable with the same value, * or both are null. */ + @Override public boolean equals(Object o) { if (o == null) { throw new IllegalArgumentException("null argument passed in equal()."); @@ -180,27 +183,25 @@ public Class getElementType() { return elementType; } - /** {@inheritDoc} */ + @Override public int hashCode() { if (value == null) return 0; return (int) value.hashCode(); } - /** {@inheritDoc} */ + @Override public String toString() { if (value == null) return "(null)"; return value.toString(); } - /** {@inheritDoc} */ @Override public Configuration getConf() { return this.conf; } - /** {@inheritDoc} */ @Override public void setConf(Configuration conf) { this.conf = conf; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java index 4ade2c4d62..21e4cc4f5b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java @@ -39,10 +39,12 @@ public FloatWritable() {} /** Return the value of this FloatWritable. 
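
EnumSetWritable's readFields/write, annotated above, encode a nullable collection with a length prefix: -1 for a null set, 0 for an empty one, otherwise the element count. The same convention transplanted onto a hypothetical string-list Writable, purely to show the idiom:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.io.Writable;

// Made-up type; only the -1/0/n length convention is taken from the patch.
public class NullableListWritable implements Writable {
  private List<String> values;

  @Override
  public void write(DataOutput out) throws IOException {
    if (values == null) { out.writeInt(-1); return; }  // null marker
    out.writeInt(values.size());
    for (String v : values) out.writeUTF(v);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    int length = in.readInt();
    if (length == -1) { values = null; return; }
    values = new ArrayList<String>(Math.max(length, 0));
    for (int i = 0; i < length; i++) values.add(in.readUTF());
  }
}
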
*/ public float get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = in.readFloat(); } + @Override public void write(DataOutput out) throws IOException { out.writeFloat(value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java index 8268a5a915..7cfeed7f93 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java @@ -114,11 +114,13 @@ public Writable get() { return instance; } + @Override public String toString() { return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() + ",value=" + instance.toString()) : "(null)") + "]"; } + @Override public void readFields(DataInput in) throws IOException { type = in.readByte(); Class clazz = getTypes()[type & 0xff]; @@ -131,6 +133,7 @@ public void readFields(DataInput in) throws IOException { instance.readFields(in); } + @Override public void write(DataOutput out) throws IOException { if (type == NOT_SET || instance == null) throw new IOException("The GenericWritable has NOT been set correctly. type=" @@ -145,10 +148,12 @@ public void write(DataOutput out) throws IOException { */ abstract protected Class[] getTypes(); + @Override public Configuration getConf() { return conf; } + @Override public void setConf(Configuration conf) { this.conf = conf; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index 819f075812..a3315a869e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -272,9 +272,11 @@ public static void closeSocket(Socket sock) { * The /dev/null of OutputStreams. */ public static class NullOutputStream extends OutputStream { + @Override public void write(byte[] b, int off, int len) throws IOException { } + @Override public void write(int b) throws IOException { } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java index 6a44d81db6..f656d028cb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java @@ -42,10 +42,12 @@ public IntWritable() {} /** Return the value of this IntWritable. */ public int get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = in.readInt(); } + @Override public void write(DataOutput out) throws IOException { out.writeInt(value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java index b9d64d904d..6dec4aa618 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java @@ -42,15 +42,18 @@ public LongWritable() {} /** Return the value of this LongWritable. 
*/ public long get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = in.readLong(); } + @Override public void write(DataOutput out) throws IOException { out.writeLong(value); } /** Returns true iff o is a LongWritable with the same value. */ + @Override public boolean equals(Object o) { if (!(o instanceof LongWritable)) return false; @@ -58,17 +61,20 @@ public boolean equals(Object o) { return this.value == other.value; } + @Override public int hashCode() { return (int)value; } /** Compares two LongWritables. */ + @Override public int compareTo(LongWritable o) { long thisValue = this.value; long thatValue = o.value; return (thisValue { public static final int MD5_LEN = 16; private static ThreadLocal DIGESTER_FACTORY = new ThreadLocal() { + @Override protected MessageDigest initialValue() { try { return MessageDigest.getInstance("MD5"); @@ -65,6 +66,7 @@ public MD5Hash(byte[] digest) { } // javadoc from Writable + @Override public void readFields(DataInput in) throws IOException { in.readFully(digest); } @@ -77,6 +79,7 @@ public static MD5Hash read(DataInput in) throws IOException { } // javadoc from Writable + @Override public void write(DataOutput out) throws IOException { out.write(digest); } @@ -155,6 +158,7 @@ public int quarterDigest() { /** Returns true iff o is an MD5Hash whose digest contains the * same values. */ + @Override public boolean equals(Object o) { if (!(o instanceof MD5Hash)) return false; @@ -165,12 +169,14 @@ public boolean equals(Object o) { /** Returns a hash code value for this object. * Only uses the first 4 bytes, since md5s are evenly distributed. */ + @Override public int hashCode() { return quarterDigest(); } /** Compares this object with the specified object for order.*/ + @Override public int compareTo(MD5Hash that) { return WritableComparator.compareBytes(this.digest, 0, MD5_LEN, that.digest, 0, MD5_LEN); @@ -182,6 +188,7 @@ public Comparator() { super(MD5Hash.class); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN); @@ -196,6 +203,7 @@ public int compare(byte[] b1, int s1, int l1, {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'}; /** Returns a string representation of this object. */ + @Override public String toString() { StringBuilder buf = new StringBuilder(MD5_LEN*2); for (int i = 0; i < MD5_LEN; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 9c14402d75..7e7d855f82 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -296,6 +296,7 @@ public static void setIndexInterval(Configuration conf, int interval) { } /** Close the map. */ + @Override public synchronized void close() throws IOException { data.close(); index.close(); @@ -723,6 +724,7 @@ public synchronized WritableComparable getClosest(WritableComparable key, } /** Close the map. 
*/ + @Override public synchronized void close() throws IOException { if (!indexClosed) { index.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java index 377c9c1656..72c7098d7a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java @@ -55,27 +55,27 @@ public MapWritable(MapWritable other) { copy(other); } - /** {@inheritDoc} */ + @Override public void clear() { instance.clear(); } - /** {@inheritDoc} */ + @Override public boolean containsKey(Object key) { return instance.containsKey(key); } - /** {@inheritDoc} */ + @Override public boolean containsValue(Object value) { return instance.containsValue(value); } - /** {@inheritDoc} */ + @Override public Set> entrySet() { return instance.entrySet(); } - /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { if (this == obj) { return true; @@ -93,27 +93,27 @@ public boolean equals(Object obj) { return false; } - /** {@inheritDoc} */ + @Override public Writable get(Object key) { return instance.get(key); } - /** {@inheritDoc} */ + @Override public int hashCode() { return 1 + this.instance.hashCode(); } - /** {@inheritDoc} */ + @Override public boolean isEmpty() { return instance.isEmpty(); } - /** {@inheritDoc} */ + @Override public Set keySet() { return instance.keySet(); } - /** {@inheritDoc} */ + @Override @SuppressWarnings("unchecked") public Writable put(Writable key, Writable value) { addToMap(key.getClass()); @@ -121,31 +121,30 @@ public Writable put(Writable key, Writable value) { return instance.put(key, value); } - /** {@inheritDoc} */ + @Override public void putAll(Map t) { for (Map.Entry e: t.entrySet()) { put(e.getKey(), e.getValue()); } } - /** {@inheritDoc} */ + @Override public Writable remove(Object key) { return instance.remove(key); } - /** {@inheritDoc} */ + @Override public int size() { return instance.size(); } - /** {@inheritDoc} */ + @Override public Collection values() { return instance.values(); } // Writable - /** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); @@ -164,7 +163,6 @@ public void write(DataOutput out) throws IOException { } } - /** {@inheritDoc} */ @SuppressWarnings("unchecked") @Override public void readFields(DataInput in) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java index beb7b17ce7..77c590fdb6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java @@ -35,6 +35,7 @@ private NullWritable() {} // no public ctor /** Returns the single instance of this class. */ public static NullWritable get() { return THIS; } + @Override public String toString() { return "(null)"; } @@ -46,8 +47,11 @@ public String toString() { public int compareTo(NullWritable other) { return 0; } + @Override public boolean equals(Object other) { return other instanceof NullWritable; } + @Override public void readFields(DataInput in) throws IOException {} + @Override public void write(DataOutput out) throws IOException {} /** A Comparator "optimized" for NullWritable. 
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java index c555111097..0f0f5c7405 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java @@ -66,15 +66,18 @@ public void set(Object instance) { this.instance = instance; } + @Override public String toString() { return "OW[class=" + declaredClass + ",value=" + instance + "]"; } + @Override public void readFields(DataInput in) throws IOException { readObject(in, this, this.conf); } + @Override public void write(DataOutput out) throws IOException { writeObject(out, instance, declaredClass, conf); } @@ -99,6 +102,7 @@ public NullInstance(Class declaredClass, Configuration conf) { super(conf); this.declaredClass = declaredClass; } + @Override public void readFields(DataInput in) throws IOException { String className = UTF8.readString(in); declaredClass = PRIMITIVE_NAMES.get(className); @@ -110,6 +114,7 @@ public void readFields(DataInput in) throws IOException { } } } + @Override public void write(DataOutput out) throws IOException { UTF8.writeString(out, declaredClass.getName()); } @@ -375,10 +380,12 @@ public static Class loadClass(Configuration conf, String className) { return declaredClass; } + @Override public void setConf(Configuration conf) { this.conf = conf; } + @Override public Configuration getConf() { return this.conf; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java index b7605db9a9..15a396dc2b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java @@ -50,6 +50,7 @@ public class OutputBuffer extends FilterOutputStream { private static class Buffer extends ByteArrayOutputStream { public byte[] getData() { return buf; } public int getLength() { return count; } + @Override public void reset() { count = 0; } public void write(InputStream in, int len) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java index 046d9e4b73..f1545b69c9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java @@ -194,6 +194,7 @@ private ReadaheadRequestImpl(String identifier, FileDescriptor fd, long off, lon this.len = len; } + @Override public void run() { if (canceled) return; // There's a very narrow race here that the file will close right at diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java index 6bc798e7e3..b30c4a4da4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java @@ -24,7 +24,6 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; 
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 293fdbbb93..8a14860773 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -625,15 +625,18 @@ private void reset(DataInputStream in, int length) throws IOException { dataSize = length; } + @Override public int getSize() { return dataSize; } + @Override public void writeUncompressedBytes(DataOutputStream outStream) throws IOException { outStream.write(data, 0, dataSize); } + @Override public void writeCompressedBytes(DataOutputStream outStream) throws IllegalArgumentException, IOException { throw @@ -666,10 +669,12 @@ private void reset(DataInputStream in, int length) throws IOException { dataSize = length; } + @Override public int getSize() { return dataSize; } + @Override public void writeUncompressedBytes(DataOutputStream outStream) throws IOException { if (decompressedStream == null) { @@ -687,6 +692,7 @@ public void writeUncompressedBytes(DataOutputStream outStream) } } + @Override public void writeCompressedBytes(DataOutputStream outStream) throws IllegalArgumentException, IOException { outStream.write(data, 0, dataSize); @@ -728,6 +734,7 @@ public TreeMap getMetadata() { return new TreeMap(this.theMetadata); } + @Override public void write(DataOutput out) throws IOException { out.writeInt(this.theMetadata.size()); Iterator> iter = @@ -739,6 +746,7 @@ public void write(DataOutput out) throws IOException { } } + @Override public void readFields(DataInput in) throws IOException { int sz = in.readInt(); if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object"); @@ -752,6 +760,7 @@ public void readFields(DataInput in) throws IOException { } } + @Override public boolean equals(Object other) { if (other == null) { return false; @@ -788,11 +797,13 @@ public boolean equals(Metadata other) { return true; } + @Override public int hashCode() { assert false : "hashCode not designed"; return 42; // any arbitrary constant will do } + @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("size: ").append(this.theMetadata.size()).append("\n"); @@ -1250,6 +1261,7 @@ public void hflush() throws IOException { Configuration getConf() { return conf; } /** Close the file. */ + @Override public synchronized void close() throws IOException { keySerializer.close(); uncompressedValSerializer.close(); @@ -1360,6 +1372,7 @@ static class RecordCompressWriter extends Writer { } /** Append a key/value pair. */ + @Override @SuppressWarnings("unchecked") public synchronized void append(Object key, Object val) throws IOException { @@ -1392,6 +1405,7 @@ public synchronized void append(Object key, Object val) } /** Append a key/value pair. */ + @Override public synchronized void appendRaw(byte[] keyData, int keyOffset, int keyLength, ValueBytes val) throws IOException { @@ -1449,6 +1463,7 @@ void writeBuffer(DataOutputBuffer uncompressedDataBuffer) } /** Compress and flush contents to dfs */ + @Override public synchronized void sync() throws IOException { if (noBufferedRecords > 0) { super.sync(); @@ -1478,6 +1493,7 @@ public synchronized void sync() throws IOException { } /** Close the file. 
*/ + @Override public synchronized void close() throws IOException { if (out != null) { sync(); @@ -1486,6 +1502,7 @@ public synchronized void close() throws IOException { } /** Append a key/value pair. */ + @Override @SuppressWarnings("unchecked") public synchronized void append(Object key, Object val) throws IOException { @@ -1518,6 +1535,7 @@ public synchronized void append(Object key, Object val) } /** Append a key/value pair. */ + @Override public synchronized void appendRaw(byte[] keyData, int keyOffset, int keyLength, ValueBytes val) throws IOException { @@ -1960,6 +1978,7 @@ private Deserializer getDeserializer(SerializationFactory sf, Class c) { } /** Close the file. */ + @Override public synchronized void close() throws IOException { // Return the decompressors to the pool CodecPool.returnDecompressor(keyLenDecompressor); @@ -2618,6 +2637,7 @@ public synchronized long getPosition() throws IOException { } /** Returns the name of the file. */ + @Override public String toString() { return filename; } @@ -2948,6 +2968,7 @@ private void sort(int count) { mergeSort.mergeSort(pointersCopy, pointers, 0, count); } class SeqFileComparator implements Comparator { + @Override public int compare(IntWritable I, IntWritable J) { return comparator.compare(rawBuffer, keyOffsets[I.get()], keyLengths[I.get()], rawBuffer, @@ -3221,6 +3242,7 @@ public MergeQueue(List segments, this.tmpDir = tmpDir; this.progress = progress; } + @Override protected boolean lessThan(Object a, Object b) { // indicate we're making progress if (progress != null) { @@ -3232,6 +3254,7 @@ protected boolean lessThan(Object a, Object b) { msa.getKey().getLength(), msb.getKey().getData(), 0, msb.getKey().getLength()) < 0; } + @Override public void close() throws IOException { SegmentDescriptor ms; // close inputs while ((ms = (SegmentDescriptor)pop()) != null) { @@ -3239,12 +3262,15 @@ public void close() throws IOException { } minSegment = null; } + @Override public DataOutputBuffer getKey() throws IOException { return rawKey; } + @Override public ValueBytes getValue() throws IOException { return rawValue; } + @Override public boolean next() throws IOException { if (size() == 0) return false; @@ -3272,6 +3298,7 @@ public boolean next() throws IOException { return true; } + @Override public Progress getProgress() { return mergeProgress; } @@ -3469,6 +3496,7 @@ public boolean shouldPreserveInput() { return preserveInput; } + @Override public int compareTo(Object o) { SegmentDescriptor that = (SegmentDescriptor)o; if (this.segmentLength != that.segmentLength) { @@ -3481,6 +3509,7 @@ public int compareTo(Object o) { compareTo(that.segmentPathName.toString()); } + @Override public boolean equals(Object o) { if (!(o instanceof SegmentDescriptor)) { return false; @@ -3495,6 +3524,7 @@ public boolean equals(Object o) { return false; } + @Override public int hashCode() { return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32)); } @@ -3584,12 +3614,14 @@ public LinkedSegmentsDescriptor (long segmentOffset, long segmentLength, /** The default cleanup. 
Subclasses can override this with a custom * cleanup */ + @Override public void cleanup() throws IOException { super.close(); if (super.shouldPreserveInput()) return; parentContainer.cleanup(); } + @Override public boolean equals(Object o) { if (!(o instanceof LinkedSegmentsDescriptor)) { return false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java index 9ba0023190..068ca9d40e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java @@ -87,6 +87,7 @@ public Reader(FileSystem fs, String dirName, WritableComparator comparator, Conf } // javadoc inherited + @Override public boolean seek(WritableComparable key) throws IOException { return super.seek(key); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java index d870a5fd84..eee744ec6a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java @@ -57,86 +57,86 @@ public SortedMapWritable(SortedMapWritable other) { copy(other); } - /** {@inheritDoc} */ + @Override public Comparator comparator() { // Returning null means we use the natural ordering of the keys return null; } - /** {@inheritDoc} */ + @Override public WritableComparable firstKey() { return instance.firstKey(); } - /** {@inheritDoc} */ + @Override public SortedMap headMap(WritableComparable toKey) { return instance.headMap(toKey); } - /** {@inheritDoc} */ + @Override public WritableComparable lastKey() { return instance.lastKey(); } - /** {@inheritDoc} */ + @Override public SortedMap subMap(WritableComparable fromKey, WritableComparable toKey) { return instance.subMap(fromKey, toKey); } - /** {@inheritDoc} */ + @Override public SortedMap tailMap(WritableComparable fromKey) { return instance.tailMap(fromKey); } - /** {@inheritDoc} */ + @Override public void clear() { instance.clear(); } - /** {@inheritDoc} */ + @Override public boolean containsKey(Object key) { return instance.containsKey(key); } - /** {@inheritDoc} */ + @Override public boolean containsValue(Object value) { return instance.containsValue(value); } - /** {@inheritDoc} */ + @Override public Set> entrySet() { return instance.entrySet(); } - /** {@inheritDoc} */ + @Override public Writable get(Object key) { return instance.get(key); } - /** {@inheritDoc} */ + @Override public boolean isEmpty() { return instance.isEmpty(); } - /** {@inheritDoc} */ + @Override public Set keySet() { return instance.keySet(); } - /** {@inheritDoc} */ + @Override public Writable put(WritableComparable key, Writable value) { addToMap(key.getClass()); addToMap(value.getClass()); return instance.put(key, value); } - /** {@inheritDoc} */ + @Override public void putAll(Map t) { for (Map.Entry e: t.entrySet()) { @@ -145,22 +145,21 @@ public void putAll(Map t) { } } - /** {@inheritDoc} */ + @Override public Writable remove(Object key) { return instance.remove(key); } - /** {@inheritDoc} */ + @Override public int size() { return instance.size(); } - /** {@inheritDoc} */ + @Override public Collection values() { return instance.values(); } - /** {@inheritDoc} */ @SuppressWarnings("unchecked") 
@Override public void readFields(DataInput in) throws IOException { @@ -187,7 +186,6 @@ public void readFields(DataInput in) throws IOException { } } - /** {@inheritDoc} */ @Override public void write(DataOutput out) throws IOException { super.write(out); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java index a7ee6876d4..949b14ae57 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java @@ -54,6 +54,7 @@ public interface Stringifier extends java.io.Closeable { * Closes this object. * @throws IOException if an I/O error occurs * */ + @Override public void close() throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java index a4f80ea886..95fb174a9d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java @@ -55,6 +55,7 @@ public class Text extends BinaryComparable private static ThreadLocal ENCODER_FACTORY = new ThreadLocal() { + @Override protected CharsetEncoder initialValue() { return Charset.forName("UTF-8").newEncoder(). onMalformedInput(CodingErrorAction.REPORT). @@ -64,6 +65,7 @@ protected CharsetEncoder initialValue() { private static ThreadLocal DECODER_FACTORY = new ThreadLocal() { + @Override protected CharsetDecoder initialValue() { return Charset.forName("UTF-8").newDecoder(). onMalformedInput(CodingErrorAction.REPORT). @@ -112,11 +114,13 @@ public byte[] copyBytes() { * valid. Please use {@link #copyBytes()} if you * need the returned array to be precisely the length of the data. */ + @Override public byte[] getBytes() { return bytes; } /** Returns the number of bytes in the byte array */ + @Override public int getLength() { return length; } @@ -281,6 +285,7 @@ public String toString() { /** deserialize */ + @Override public void readFields(DataInput in) throws IOException { int newLength = WritableUtils.readVInt(in); setCapacity(newLength, false); @@ -313,6 +318,7 @@ public static void skip(DataInput in) throws IOException { * length uses zero-compressed encoding * @see Writable#write(DataOutput) */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, length); out.write(bytes, 0, length); @@ -329,6 +335,7 @@ public void write(DataOutput out, int maxLength) throws IOException { } /** Returns true iff o is a Text with the same contents. 
*/ + @Override public boolean equals(Object o) { if (o instanceof Text) return super.equals(o); @@ -346,6 +353,7 @@ public Comparator() { super(Text.class); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { int n1 = WritableUtils.decodeVIntSize(b1[s1]); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java index 76304623ee..cf8947d32d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java @@ -57,6 +57,7 @@ public Object toArray() { public Writable[][] get() { return values; } + @Override public void readFields(DataInput in) throws IOException { // construct matrix values = new Writable[in.readInt()][]; @@ -81,6 +82,7 @@ public void readFields(DataInput in) throws IOException { } } + @Override public void write(DataOutput out) throws IOException { out.writeInt(values.length); // write values for (int i = 0; i < values.length; i++) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index 6a0f88673f..ef7512996c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -110,6 +110,7 @@ public void set(UTF8 other) { System.arraycopy(other.bytes, 0, bytes, 0, length); } + @Override public void readFields(DataInput in) throws IOException { length = in.readUnsignedShort(); if (bytes == null || bytes.length < length) @@ -123,6 +124,7 @@ public static void skip(DataInput in) throws IOException { WritableUtils.skipFully(in, length); } + @Override public void write(DataOutput out) throws IOException { out.writeShort(length); out.write(bytes, 0, length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java index e37b144dbf..f537524c4b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java @@ -43,10 +43,12 @@ public VIntWritable() {} /** Return the value of this VIntWritable. */ public int get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = WritableUtils.readVInt(in); } + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java index 869bf43914..a9fac30605 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java @@ -43,10 +43,12 @@ public VLongWritable() {} /** Return the value of this LongWritable. 
*/ public long get() { return value; } + @Override public void readFields(DataInput in) throws IOException { value = WritableUtils.readVLong(in); } + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVLong(out, value); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java index 162374be21..a72be58832 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java @@ -39,6 +39,7 @@ public VersionMismatchException(byte expectedVersionIn, byte foundVersionIn){ } /** Returns a string representation of this object. */ + @Override public String toString(){ return "A record version mismatch occured. Expecting v" + expectedVersion + ", found v" + foundVersion; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java index a197fd2e4f..c2db55520c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java @@ -40,11 +40,13 @@ public abstract class VersionedWritable implements Writable { public abstract byte getVersion(); // javadoc from Writable + @Override public void write(DataOutput out) throws IOException { out.writeByte(getVersion()); // store version } // javadoc from Writable + @Override public void readFields(DataInput in) throws IOException { byte version = in.readByte(); // read version if (version != getVersion()) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java index 6eb3a21443..eb3c8d322c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java @@ -120,6 +120,7 @@ public WritableComparable newKey() { * Writable#readFields(DataInput)}, then calls {@link * #compare(WritableComparable,WritableComparable)}. 
*/ + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { try { buffer.reset(b1, s1, l1); // parse key1 @@ -144,6 +145,7 @@ public int compare(WritableComparable a, WritableComparable b) { return a.compareTo(b); } + @Override public int compare(Object a, Object b) { return compare((WritableComparable)a, (WritableComparable)b); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java index a7a925f35a..35f7cb43ea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java @@ -63,6 +63,7 @@ public BZip2Codec() { } * @throws java.io.IOException * Throws IO exception */ + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return new BZip2CompressionOutputStream(out); @@ -74,6 +75,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) * @return CompressionOutputStream @throws java.io.IOException */ + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { return createOutputStream(out); @@ -84,6 +86,7 @@ public CompressionOutputStream createOutputStream(OutputStream out, * * @return BZip2DummyCompressor.class */ + @Override public Class getCompressorType() { return BZip2DummyCompressor.class; } @@ -93,6 +96,7 @@ public Class getCompressorTy * * @return Compressor */ + @Override public Compressor createCompressor() { return new BZip2DummyCompressor(); } @@ -106,6 +110,7 @@ public Compressor createCompressor() { * @throws java.io.IOException * Throws IOException */ + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return new BZip2CompressionInputStream(in); @@ -116,6 +121,7 @@ public CompressionInputStream createInputStream(InputStream in) * * @return CompressionInputStream */ + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { return createInputStream(in); @@ -133,6 +139,7 @@ public CompressionInputStream createInputStream(InputStream in, * * @return CompressionInputStream for BZip2 aligned at block boundaries */ + @Override public SplitCompressionInputStream createInputStream(InputStream seekableIn, Decompressor decompressor, long start, long end, READ_MODE readMode) throws IOException { @@ -181,6 +188,7 @@ public SplitCompressionInputStream createInputStream(InputStream seekableIn, * * @return BZip2DummyDecompressor.class */ + @Override public Class getDecompressorType() { return BZip2DummyDecompressor.class; } @@ -190,6 +198,7 @@ public Class getDecompress * * @return Decompressor */ + @Override public Decompressor createDecompressor() { return new BZip2DummyDecompressor(); } @@ -199,6 +208,7 @@ public Decompressor createDecompressor() { * * @return A String telling the default bzip2 file extension */ + @Override public String getDefaultExtension() { return ".bz2"; } @@ -226,6 +236,7 @@ private void writeStreamHeader() throws IOException { } } + @Override public void finish() throws IOException { if (needsReset) { // In the case that nothing is written to this stream, we still need to @@ -245,12 +256,14 @@ private void internalReset() throws IOException { } } + @Override public void resetState() throws IOException { // 
Cannot write to out at this point because out might not be ready // yet, as in SequenceFile.Writer implementation. needsReset = true; } + @Override public void write(int b) throws IOException { if (needsReset) { internalReset(); @@ -258,6 +271,7 @@ public void write(int b) throws IOException { this.output.write(b); } + @Override public void write(byte[] b, int off, int len) throws IOException { if (needsReset) { internalReset(); @@ -265,6 +279,7 @@ public void write(byte[] b, int off, int len) throws IOException { this.output.write(b, off, len); } + @Override public void close() throws IOException { if (needsReset) { // In the case that nothing is written to this stream, we still need to @@ -382,6 +397,7 @@ private BufferedInputStream readStreamHeader() throws IOException { }// end of method + @Override public void close() throws IOException { if (!needsReset) { input.close(); @@ -417,6 +433,7 @@ public void close() throws IOException { * */ + @Override public int read(byte[] b, int off, int len) throws IOException { if (needsReset) { internalReset(); @@ -440,6 +457,7 @@ public int read(byte[] b, int off, int len) throws IOException { } + @Override public int read() throws IOException { byte b[] = new byte[1]; int result = this.read(b, 0, 1); @@ -454,6 +472,7 @@ private void internalReset() throws IOException { } } + @Override public void resetState() throws IOException { // Cannot read from bufferedIn at this point because bufferedIn // might not be ready @@ -461,6 +480,7 @@ public void resetState() throws IOException { needsReset = true; } + @Override public long getPos() { return this.compressedStreamPosition; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java index 5d854861f2..434183bbc2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java @@ -78,6 +78,7 @@ public BlockCompressorStream(OutputStream out, Compressor compressor) { * Each block contains the uncompressed length for the block, followed by * one or more length-prefixed blocks of compressed data. 
*/ + @Override public void write(byte[] b, int off, int len) throws IOException { // Sanity checks if (compressor.finished()) { @@ -132,6 +133,7 @@ public void write(byte[] b, int off, int len) throws IOException { } } + @Override public void finish() throws IOException { if (!compressor.finished()) { rawWriteInt((int)compressor.getBytesRead()); @@ -142,6 +144,7 @@ public void finish() throws IOException { } } + @Override protected void compress() throws IOException { int len = compressor.compress(buffer, 0, buffer.length); if (len > 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java index 42ade89019..7d2504e3e2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java @@ -65,6 +65,7 @@ protected BlockDecompressorStream(InputStream in) throws IOException { super(in); } + @Override protected int decompress(byte[] b, int off, int len) throws IOException { // Check if we are the beginning of a block if (noUncompressedBytes == originalBlockSize) { @@ -104,6 +105,7 @@ protected int decompress(byte[] b, int off, int len) throws IOException { return n; } + @Override protected int getCompressedData() throws IOException { checkStream(); @@ -126,6 +128,7 @@ protected int getCompressedData() throws IOException { return len; } + @Override public void resetState() throws IOException { originalBlockSize = 0; noUncompressedBytes = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index dc95e9e999..57fb366bdd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -75,6 +75,7 @@ private void addCodec(CompressionCodec codec) { /** * Print the extension map out as a string. */ + @Override public String toString() { StringBuilder buf = new StringBuilder(); Iterator> itr = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java index 4f7757dfed..4491819d72 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java @@ -55,6 +55,7 @@ protected CompressionInputStream(InputStream in) throws IOException { this.in = in; } + @Override public void close() throws IOException { in.close(); } @@ -63,6 +64,7 @@ public void close() throws IOException { * Read bytes from the stream. * Made abstract to prevent leakage to underlying stream. 
*/ + @Override public abstract int read(byte[] b, int off, int len) throws IOException; /** @@ -76,6 +78,7 @@ public void close() throws IOException { * * @return Current position in stream as a long */ + @Override public long getPos() throws IOException { if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)){ //This way of getting the current position will not work for file @@ -95,6 +98,7 @@ public long getPos() throws IOException { * @throws UnsupportedOperationException */ + @Override public void seek(long pos) throws UnsupportedOperationException { throw new UnsupportedOperationException(); } @@ -104,6 +108,7 @@ public void seek(long pos) throws UnsupportedOperationException { * * @throws UnsupportedOperationException */ + @Override public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException { throw new UnsupportedOperationException(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java index b4a47946b2..9bd6b84f98 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java @@ -44,11 +44,13 @@ protected CompressionOutputStream(OutputStream out) { this.out = out; } + @Override public void close() throws IOException { finish(); out.close(); } + @Override public void flush() throws IOException { out.flush(); } @@ -57,6 +59,7 @@ public void flush() throws IOException { * Write compressed bytes to the stream. * Made abstract to prevent leakage to underlying stream. 
*/ + @Override public abstract void write(byte[] b, int off, int len) throws IOException; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java index 4cd7425ba6..84f1b2f179 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java @@ -59,6 +59,7 @@ protected CompressorStream(OutputStream out) { super(out); } + @Override public void write(byte[] b, int off, int len) throws IOException { // Sanity checks if (compressor.finished()) { @@ -83,6 +84,7 @@ protected void compress() throws IOException { } } + @Override public void finish() throws IOException { if (!compressor.finished()) { compressor.finish(); @@ -92,10 +94,12 @@ public void finish() throws IOException { } } + @Override public void resetState() throws IOException { compressor.reset(); } + @Override public void close() throws IOException { if (!closed) { finish(); @@ -105,6 +109,7 @@ public void close() throws IOException { } private byte[] oneByte = new byte[1]; + @Override public void write(int b) throws IOException { oneByte[0] = (byte)(b & 0xff); write(oneByte, 0, oneByte.length); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java index d0ef6ee6d3..16e0ad763a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java @@ -66,11 +66,13 @@ protected DecompressorStream(InputStream in) throws IOException { } private byte[] oneByte = new byte[1]; + @Override public int read() throws IOException { checkStream(); return (read(oneByte, 0, oneByte.length) == -1) ? -1 : (oneByte[0] & 0xff); } + @Override public int read(byte[] b, int off, int len) throws IOException { checkStream(); @@ -163,11 +165,13 @@ protected void checkStream() throws IOException { } } + @Override public void resetState() throws IOException { decompressor.reset(); } private byte[] skipBytes = new byte[512]; + @Override public long skip(long n) throws IOException { // Sanity checks if (n < 0) { @@ -189,11 +193,13 @@ public long skip(long n) throws IOException { return skipped; } + @Override public int available() throws IOException { checkStream(); return (eof) ? 
0 : 1; } + @Override public void close() throws IOException { if (!closed) { in.close(); @@ -201,13 +207,16 @@ public void close() throws IOException { } } + @Override public boolean markSupported() { return false; } + @Override public synchronized void mark(int readlimit) { } + @Override public synchronized void reset() throws IOException { throw new IOException("mark/reset not supported"); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java index 1be28bfce3..ea7df20de3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java @@ -37,14 +37,17 @@ public class DefaultCodec implements Configurable, CompressionCodec { Configuration conf; + @Override public void setConf(Configuration conf) { this.conf = conf; } + @Override public Configuration getConf() { return conf; } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { // This may leak memory if called in a loop. The createCompressor() call @@ -57,6 +60,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { @@ -64,20 +68,24 @@ public CompressionOutputStream createOutputStream(OutputStream out, conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Class getCompressorType() { return ZlibFactory.getZlibCompressorType(conf); } + @Override public Compressor createCompressor() { return ZlibFactory.getZlibCompressor(conf); } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return new DecompressorStream(in, createDecompressor(), conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { @@ -85,14 +93,17 @@ public CompressionInputStream createInputStream(InputStream in, conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Class getDecompressorType() { return ZlibFactory.getZlibDecompressorType(conf); } + @Override public Decompressor createDecompressor() { return ZlibFactory.getZlibDecompressor(conf); } + @Override public String getDefaultExtension() { return ".deflate"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java index b17fe4b39e..520205e166 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java @@ -20,15 +20,11 @@ import java.io.*; import java.util.zip.GZIPOutputStream; -import java.util.zip.GZIPInputStream; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.io.compress.zlib.*; -import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel; -import 
org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy; /** * This class creates gzip compressors/decompressors. @@ -66,32 +62,39 @@ protected GzipOutputStream(CompressorStream out) { super(out); } + @Override public void close() throws IOException { out.close(); } + @Override public void flush() throws IOException { out.flush(); } + @Override public void write(int b) throws IOException { out.write(b); } + @Override public void write(byte[] data, int offset, int length) throws IOException { out.write(data, offset, length); } + @Override public void finish() throws IOException { ((ResetableGZIPOutputStream) out).finish(); } + @Override public void resetState() throws IOException { ((ResetableGZIPOutputStream) out).resetState(); } } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return (ZlibFactory.isNativeZlibLoaded(conf)) ? @@ -100,6 +103,7 @@ public CompressionOutputStream createOutputStream(OutputStream out) new GzipOutputStream(out); } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { @@ -110,23 +114,27 @@ public CompressionOutputStream createOutputStream(OutputStream out, createOutputStream(out); } + @Override public Compressor createCompressor() { return (ZlibFactory.isNativeZlibLoaded(conf)) ? new GzipZlibCompressor(conf) : null; } + @Override public Class getCompressorType() { return ZlibFactory.isNativeZlibLoaded(conf) ? GzipZlibCompressor.class : null; } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return createInputStream(in, null); } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { @@ -137,18 +145,21 @@ public CompressionInputStream createInputStream(InputStream in, conf.getInt("io.file.buffer.size", 4*1024)); } + @Override public Decompressor createDecompressor() { return (ZlibFactory.isNativeZlibLoaded(conf)) ? new GzipZlibDecompressor() : new BuiltInGzipDecompressor(); } + @Override public Class getDecompressorType() { return ZlibFactory.isNativeZlibLoaded(conf) ? 
GzipZlibDecompressor.class : BuiltInGzipDecompressor.class; } + @Override public String getDefaultExtension() { return ".gz"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java index 14cc9d5b82..00e892d845 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java @@ -338,6 +338,7 @@ private void changeStateToProcessABlock() throws IOException { } + @Override public int read() throws IOException { if (this.in != null) { @@ -372,6 +373,7 @@ public int read() throws IOException { */ + @Override public int read(final byte[] dest, final int offs, final int len) throws IOException { if (offs < 0) { @@ -574,6 +576,7 @@ private void complete() throws IOException { } } + @Override public void close() throws IOException { InputStream inShadow = this.in; if (inShadow != null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java index 3060eb924f..ca4e5cd0df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java @@ -639,6 +639,7 @@ public CBZip2OutputStream(final OutputStream out, final int blockSize) init(); } + @Override public void write(final int b) throws IOException { if (this.out != null) { write0(b); @@ -704,6 +705,7 @@ private void writeRun() throws IOException { /** * Overriden to close the stream. 
*/ + @Override protected void finalize() throws Throwable { finish(); super.finalize(); @@ -726,6 +728,7 @@ public void finish() throws IOException { } } + @Override public void close() throws IOException { if (out != null) { OutputStream outShadow = this.out; @@ -739,6 +742,7 @@ public void close() throws IOException { } } + @Override public void flush() throws IOException { OutputStream outShadow = this.out; if (outShadow != null) { @@ -849,6 +853,7 @@ public final int getBlockSize() { return this.blockSize100k; } + @Override public void write(final byte[] buf, int offs, final int len) throws IOException { if (offs < 0) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 0cf65e5144..22a3118f5f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -258,6 +258,7 @@ public synchronized int getRemaining() { return 0; } + @Override public synchronized void reset() { finished = false; compressedDirectBufLen = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index baf864094e..4620092f08 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -257,6 +257,7 @@ public synchronized int getRemaining() { return 0; } + @Override public synchronized void reset() { finished = false; compressedDirectBufLen = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java index 1e5525e743..41f8036fda 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java @@ -122,7 +122,7 @@ public BuiltInGzipDecompressor() { // in the first buffer load? (But how else would one do it?) } - /** {@inheritDoc} */ + @Override public synchronized boolean needsInput() { if (state == GzipStateLabel.DEFLATE_STREAM) { // most common case return inflater.needsInput(); @@ -144,6 +144,7 @@ public synchronized boolean needsInput() { * the bulk deflate stream, which is a performance hit we don't want * to absorb. (Decompressor now documents this requirement.) */ + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b == null) { throw new NullPointerException(); @@ -175,6 +176,7 @@ public synchronized void setInput(byte[] b, int off, int len) { * methods below), the deflate stream is never copied; Inflater operates * directly on the user's buffer. 
*/ + @Override public synchronized int decompress(byte[] b, int off, int len) throws IOException { int numAvailBytes = 0; @@ -421,16 +423,17 @@ public synchronized long getBytesRead() { * * @return the total (non-negative) number of unprocessed bytes in input */ + @Override public synchronized int getRemaining() { return userBufLen; } - /** {@inheritDoc} */ + @Override public synchronized boolean needsDictionary() { return inflater.needsDictionary(); } - /** {@inheritDoc} */ + @Override public synchronized void setDictionary(byte[] b, int off, int len) { inflater.setDictionary(b, off, len); } @@ -439,6 +442,7 @@ public synchronized void setDictionary(byte[] b, int off, int len) { * Returns true if the end of the gzip substream (single "member") has been * reached.
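The needsDictionary() and setDictionary() overrides above delegate straight to the wrapped java.util.zip.Inflater, so the preset-dictionary handshake is the standard JDK one: inflate() returns 0 and needsDictionary() turns true until the caller supplies the dictionary the compressor used. A sketch:

    import java.util.zip.DataFormatException;
    import java.util.zip.Deflater;
    import java.util.zip.Inflater;

    public class DictionaryHandshake {
      public static void main(String[] args) throws DataFormatException {
        byte[] dict = "a shared dictionary ".getBytes();
        byte[] raw = "a shared dictionary makes this compress well".getBytes();

        Deflater def = new Deflater();
        def.setDictionary(dict);           // must precede the input
        def.setInput(raw);
        def.finish();
        byte[] zipped = new byte[256];
        int zlen = def.deflate(zipped);
        def.end();

        Inflater inf = new Inflater();
        inf.setInput(zipped, 0, zlen);
        byte[] out = new byte[256];
        int n = inf.inflate(out);          // returns 0: dictionary needed
        if (inf.needsDictionary()) {
          inf.setDictionary(dict);         // same bytes the compressor used
          n = inf.inflate(out);
        }
        inf.end();
        System.out.println(new String(out, 0, n));
      }
    }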
*/ + @Override public synchronized boolean finished() { return (state == GzipStateLabel.FINISHED); } @@ -447,6 +451,7 @@ public synchronized boolean finished() { * Resets everything, including the input buffer, regardless of whether the * current gzip substream is finished.
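The reset() contract spelled out above exists so a pooled decompressor can be reused across streams instead of reallocated: counters, buffers, and the underlying inflater all return to their post-construction state. The JDK analogue of that reuse pattern, as a sketch:

    import java.util.zip.Deflater;

    public class ResetReuse {
      public static void main(String[] args) {
        Deflater def = new Deflater();
        for (String s : new String[] {"first stream", "second stream"}) {
          def.reset();                 // forget all state from the last stream
          def.setInput(s.getBytes());
          def.finish();
          byte[] buf = new byte[128];
          int n = def.deflate(buf);
          // getBytesRead()/getBytesWritten() restart from zero after reset()
          System.out.println(s + " -> " + n + " compressed bytes");
        }
        def.end();                     // release native resources once done
      }
    }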
*/ + @Override public synchronized void reset() { // could optionally emit INFO message if state != GzipStateLabel.FINISHED inflater.reset(); @@ -463,7 +468,7 @@ public synchronized void reset() { hasHeaderCRC = false; } - /** {@inheritDoc} */ + @Override public synchronized void end() { inflater.end(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java index b269d557b7..509456e834 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java @@ -48,6 +48,7 @@ public BuiltInZlibDeflater() { super(); } + @Override public synchronized int compress(byte[] b, int off, int len) throws IOException { return super.deflate(b, off, len); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java index 0223587ad0..4fda6723b8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java @@ -39,6 +39,7 @@ public BuiltInZlibInflater() { super(); } + @Override public synchronized int decompress(byte[] b, int off, int len) throws IOException { try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index 8839bc98fa..c0d0d699a5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -259,6 +259,7 @@ public synchronized void reinit(Configuration conf) { } } + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b== null) { throw new NullPointerException(); @@ -287,6 +288,7 @@ synchronized void setInputFromSavedData() { uncompressedDirectBufLen = uncompressedDirectBuf.position(); } + @Override public synchronized void setDictionary(byte[] b, int off, int len) { if (stream == 0 || b == null) { throw new NullPointerException(); @@ -297,6 +299,7 @@ public synchronized void setDictionary(byte[] b, int off, int len) { setDictionary(stream, b, off, len); } + @Override public synchronized boolean needsInput() { // Consume remaining compressed data? 
if (compressedDirectBuf.remaining() > 0) { @@ -325,16 +328,19 @@ public synchronized boolean needsInput() { return false; } + @Override public synchronized void finish() { finish = true; } + @Override public synchronized boolean finished() { // Check if 'zlib' says it's 'finished' and // all compressed data has been consumed return (finished && compressedDirectBuf.remaining() == 0); } + @Override public synchronized int compress(byte[] b, int off, int len) throws IOException { if (b == null) { @@ -385,6 +391,7 @@ public synchronized int compress(byte[] b, int off, int len) * * @return the total (non-negative) number of compressed bytes output so far */ + @Override public synchronized long getBytesWritten() { checkStream(); return getBytesWritten(stream); @@ -395,11 +402,13 @@ public synchronized long getBytesWritten() { * * @return the total (non-negative) number of uncompressed bytes input so far */ + @Override public synchronized long getBytesRead() { checkStream(); return getBytesRead(stream); } + @Override public synchronized void reset() { checkStream(); reset(stream); @@ -413,6 +422,7 @@ public synchronized void reset() { userBufOff = userBufLen = 0; } + @Override public synchronized void end() { if (stream != 0) { end(stream); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java index 2db70551e8..ba67571998 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java @@ -118,6 +118,7 @@ public ZlibDecompressor() { this(CompressionHeader.DEFAULT_HEADER, DEFAULT_DIRECT_BUFFER_SIZE); } + @Override public synchronized void setInput(byte[] b, int off, int len) { if (b == null) { throw new NullPointerException(); @@ -154,6 +155,7 @@ synchronized void setInputFromSavedData() { userBufLen -= compressedDirectBufLen; } + @Override public synchronized void setDictionary(byte[] b, int off, int len) { if (stream == 0 || b == null) { throw new NullPointerException(); @@ -165,6 +167,7 @@ public synchronized void setDictionary(byte[] b, int off, int len) { needDict = false; } + @Override public synchronized boolean needsInput() { // Consume remaining compressed data? if (uncompressedDirectBuf.remaining() > 0) { @@ -184,16 +187,19 @@ public synchronized boolean needsInput() { return false; } + @Override public synchronized boolean needsDictionary() { return needDict; } + @Override public synchronized boolean finished() { // Check if 'zlib' says it's 'finished' and // all compressed data has been consumed return (finished && uncompressedDirectBuf.remaining() == 0); } + @Override public synchronized int decompress(byte[] b, int off, int len) throws IOException { if (b == null) { @@ -255,6 +261,7 @@ public synchronized long getBytesRead() { * * @return the total (non-negative) number of unprocessed bytes in input */ + @Override public synchronized int getRemaining() { checkStream(); return userBufLen + getRemaining(stream); // userBuf + compressedDirectBuf @@ -263,6 +270,7 @@ public synchronized int getRemaining() { /** * Resets everything including the input buffers (user and direct).
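The ZlibCompressor and ZlibDecompressor hunks keep touching the same pair of fields: a user-supplied byte[] plus a direct ByteBuffer that stages data for the native zlib calls, with needsInput() answering false while staged work remains. A stripped-down sketch of that staging idea; DirectStage is illustrative, not the patch's code:

    import java.nio.ByteBuffer;

    // Illustrative staging buffer: input is parked in a user buffer, copied
    // into a direct buffer for the native layer, and more input is accepted
    // only once both are drained.
    class DirectStage {
      private final ByteBuffer directBuf = ByteBuffer.allocateDirect(4096);
      private byte[] userBuf;
      private int userBufOff;
      private int userBufLen;

      DirectStage() {
        directBuf.limit(0);            // start out drained
      }

      void setInput(byte[] b, int off, int len) {
        userBuf = b;
        userBufOff = off;
        userBufLen = len;
      }

      boolean needsInput() {
        return directBuf.remaining() == 0 && userBufLen == 0;
      }

      // Move the next chunk from the user buffer into the direct buffer,
      // ready for a native consumer to read.
      void stage() {
        directBuf.clear();
        int n = Math.min(userBufLen, directBuf.remaining());
        directBuf.put(userBuf, userBufOff, n);
        userBufOff += n;
        userBufLen -= n;
        directBuf.flip();
      }
    }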
*/ + @Override public synchronized void reset() { checkStream(); reset(stream); @@ -274,6 +282,7 @@ public synchronized void reset() { userBufOff = userBufLen = 0; } + @Override public synchronized void end() { if (stream != 0) { end(stream); @@ -281,6 +290,7 @@ public synchronized void end() { } } + @Override protected void finalize() { end(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java index 6b4fdd89aa..ce93266574 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java @@ -300,6 +300,7 @@ public Writer(FSDataOutputStream fout, String compressionName, * Close the BCFile Writer. Attempting to use the Writer after calling * close is not allowed and may lead to undetermined results. */ + @Override public void close() throws IOException { if (closed == true) { return; @@ -447,6 +448,7 @@ private class MetaBlockRegister implements BlockRegister { this.compressAlgo = compressAlgo; } + @Override public void register(long raw, long begin, long end) { metaIndex.addEntry(new MetaIndexEntry(name, compressAlgo, new BlockRegion(begin, end - begin, raw))); @@ -463,6 +465,7 @@ private class DataBlockRegister implements BlockRegister { // do nothing } + @Override public void register(long raw, long begin, long end) { dataIndex.addBlockRegion(new BlockRegion(begin, end - begin, raw)); } @@ -671,6 +674,7 @@ public Version getAPIVersion() { /** * Finishing reading the BCFile. Release all resources. */ + @Override public void close() { // nothing to be done now } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java index a9cb1ec1c3..0808711f89 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java @@ -68,6 +68,7 @@ public ScalarLong(long m) { magnitude = m; } + @Override public long magnitude() { return magnitude; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 0b9ed9d2b3..9a57581c90 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -297,6 +297,7 @@ public Writer(FSDataOutputStream fsdos, int minBlockSize, * * The underlying FSDataOutputStream is not closed. */ + @Override public void close() throws IOException { if ((state == State.CLOSED)) { return; @@ -820,6 +821,7 @@ public Reader(FSDataInputStream fsdis, long fileLength, Configuration conf) * Close the reader. The state of the Reader object is undefined after * close. Calling close() for multiple times has no effect. */ + @Override public void close() throws IOException { readerBCF.close(); } @@ -1573,6 +1575,7 @@ private void parkCursorAtEnd() throws IOException { * scanner after calling close is not defined. The entry returned by the * previous entry() call will be invalid. 
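The BCFile and TFile close() methods above are deliberately idempotent: the first call tears the writer or reader down and every later call is a no-op, which is what makes them safe in nested try/finally chains. The pattern in isolation; IdempotentWriter is a made-up name:

    import java.io.Closeable;
    import java.io.IOException;

    class IdempotentWriter implements Closeable {
      private boolean closed;

      void append(String record) throws IOException {
        if (closed) {
          throw new IOException("writer has been closed");
        }
        // ... buffer the record ...
      }

      @Override
      public void close() throws IOException {
        if (closed) {
          return;                 // second and later calls are no-ops
        }
        closed = true;            // flip the flag first so a failure below
                                  // cannot trigger a double-release
        // ... flush indexes, release buffers ...
      }
    }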
*/ + @Override public void close() throws IOException { parkCursorAtEnd(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index 2a7f883d95..4cfa0761ed 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -202,6 +202,7 @@ public static class Stat { this.mode = mode; } + @Override public String toString() { return "Stat(owner='" + owner + "', group='" + group + "'" + ", mode=" + mode + ")"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java index 5064df5d86..db653b23f4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java @@ -38,6 +38,7 @@ public Errno getErrno() { return errno; } + @Override public String toString() { return errno.toString() + ": " + super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index 8b8387ce2c..5c29a33312 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -150,6 +150,7 @@ public static final RetryPolicy failoverOnNetworkException( } static class TryOnceThenFail implements RetryPolicy { + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { return RetryAction.FAIL; @@ -157,6 +158,7 @@ public RetryAction shouldRetry(Exception e, int retries, int failovers, } static class RetryForever implements RetryPolicy { + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { return RetryAction.RETRY; @@ -430,6 +432,7 @@ public ExceptionDependentRetry(RetryPolicy defaultPolicy, this.exceptionToPolicyMap = exceptionToPolicyMap; } + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { RetryPolicy policy = exceptionToPolicyMap.get(e.getClass()); @@ -457,6 +460,7 @@ public RemoteExceptionDependentRetry(RetryPolicy defaultPolicy, } } + @Override public RetryAction shouldRetry(Exception e, int retries, int failovers, boolean isMethodIdempotent) throws Exception { RetryPolicy policy = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java index 7e74cb7732..05205c5523 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java @@ -56,6 +56,7 @@ protected DeserializerComparator(Deserializer deserializer) 
this.deserializer.open(buffer); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java index 61d6f171c9..f08d0008c6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java @@ -24,11 +24,8 @@ import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; -import java.util.Map; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.io.RawComparator; /** *
@@ -45,6 +42,7 @@ static class JavaSerializationDeserializer private ObjectInputStream ois; + @Override public void open(InputStream in) throws IOException { ois = new ObjectInputStream(in) { @Override protected void readStreamHeader() { @@ -53,6 +51,7 @@ public void open(InputStream in) throws IOException { }; } + @Override @SuppressWarnings("unchecked") public T deserialize(T object) throws IOException { try { @@ -63,6 +62,7 @@ public T deserialize(T object) throws IOException { } } + @Override public void close() throws IOException { ois.close(); } @@ -74,6 +74,7 @@ static class JavaSerializationSerializer private ObjectOutputStream oos; + @Override public void open(OutputStream out) throws IOException { oos = new ObjectOutputStream(out) { @Override protected void writeStreamHeader() { @@ -82,27 +83,32 @@ public void open(OutputStream out) throws IOException { }; } + @Override public void serialize(Serializable object) throws IOException { oos.reset(); // clear (class) back-references oos.writeObject(object); } + @Override public void close() throws IOException { oos.close(); } } + @Override @InterfaceAudience.Private public boolean accept(Class c) { return Serializable.class.isAssignableFrom(c); } + @Override @InterfaceAudience.Private public Deserializer getDeserializer(Class c) { return new JavaSerializationDeserializer(); } + @Override @InterfaceAudience.Private public Serializer getSerializer(Class c) { return new JavaSerializationSerializer(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java index 12927bea14..f9bf692f1f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java @@ -44,6 +44,7 @@ public JavaSerializationComparator() throws IOException { super(new JavaSerialization.JavaSerializationDeserializer()); } + @Override @InterfaceAudience.Private public int compare(T o1, T o2) { return o1.compareTo(o2); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java index 8511d25bcd..ad965d6b2f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.Map; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java index 1d5c068886..f340cb3a98 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java @@ -47,11 +47,13 @@ 
public abstract class AvroSerialization extends Configured @InterfaceAudience.Private public static final String AVRO_SCHEMA_KEY = "Avro-Schema"; + @Override @InterfaceAudience.Private public Deserializer getDeserializer(Class c) { return new AvroDeserializer(c); } + @Override @InterfaceAudience.Private public Serializer getSerializer(Class c) { return new AvroSerializer(c); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index b0f5c93f75..de7af1b6b0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -364,6 +364,7 @@ private void handleTimeout(SocketTimeoutException e) throws IOException { * until a byte is read. * @throws IOException for any IO problem other than socket timeout */ + @Override public int read() throws IOException { do { try { @@ -380,6 +381,7 @@ public int read() throws IOException { * * @return the total number of bytes read; -1 if the connection is closed. */ + @Override public int read(byte[] buf, int off, int len) throws IOException { do { try { @@ -510,6 +512,7 @@ private synchronized void handleSaslConnectionFailure( final Random rand, final UserGroupInformation ugi) throws IOException, InterruptedException { ugi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException, InterruptedException { final short MAX_BACKOFF = 5000; closeConnection(); @@ -803,6 +806,7 @@ private synchronized void sendPing() throws IOException { } } + @Override public void run() { if (LOG.isDebugEnabled()) LOG.debug(getName() + ": starting, having connections " diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index 96ca97817a..3a33abe6a6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -240,6 +240,7 @@ public Object invoke(Object proxy, Method method, Object[] args) return returnMessage; } + @Override public void close() throws IOException { if (!isClosed) { isClosed = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java index f15c0837dc..cc66958d14 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java @@ -19,7 +19,6 @@ package org.apache.hadoop.ipc; import java.io.IOException; -import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.HashSet; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java index 1cc269f666..69d18eacd3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java @@ -36,7 +36,8 @@ public class ProtocolSignature implements Writable { 
WritableFactories.setFactory (ProtocolSignature.class, new WritableFactory() { - public Writable newInstance() { return new ProtocolSignature(); } + @Override + public Writable newInstance() { return new ProtocolSignature(); } }); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index a3460ed9b0..bc3317120f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -517,6 +517,7 @@ private class Reader extends Thread { this.readSelector = Selector.open(); } + @Override public void run() { LOG.info("Starting " + getName()); try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index 965c77abc7..fc6fc6f8c5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -142,6 +142,7 @@ public long getRpcVersion() { return rpcVersion; } + @Override @SuppressWarnings("deprecation") public void readFields(DataInput in) throws IOException { rpcVersion = in.readLong(); @@ -159,6 +160,7 @@ public void readFields(DataInput in) throws IOException { } } + @Override @SuppressWarnings("deprecation") public void write(DataOutput out) throws IOException { out.writeLong(rpcVersion); @@ -173,6 +175,7 @@ public void write(DataOutput out) throws IOException { } } + @Override public String toString() { StringBuilder buffer = new StringBuilder(); buffer.append(methodName); @@ -189,10 +192,12 @@ public String toString() { return buffer.toString(); } + @Override public void setConf(Configuration conf) { this.conf = conf; } + @Override public Configuration getConf() { return this.conf; } @@ -215,6 +220,7 @@ public Invoker(Class protocol, this.client = CLIENTS.getClient(conf, factory); } + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { long startTime = 0; @@ -232,6 +238,7 @@ public Object invoke(Object proxy, Method method, Object[] args) } /* close the IPC client that's responsible for this invoker's RPCs */ + @Override synchronized public void close() { if (!isClosed) { isClosed = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java index 775279dfb4..5e2ed53255 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java @@ -88,6 +88,7 @@ private static void process(String urlstring) { public static class Servlet extends HttpServlet { private static final long serialVersionUID = 1L; + @Override public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java index c4fa203c61..841874fc08 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java @@ -35,7 +35,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; -import org.apache.hadoop.metrics.MetricsException; import org.apache.hadoop.metrics.spi.AbstractMetricsContext; import org.apache.hadoop.metrics.spi.OutputRecord; import org.apache.hadoop.metrics.spi.Util; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java index 1e2ee39814..60f5fec44a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java @@ -30,7 +30,6 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; import org.apache.hadoop.metrics.MetricsContext; -import org.apache.hadoop.metrics.MetricsException; import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java index 63f7ddaacd..ff2a49c69a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java @@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; -import org.apache.hadoop.metrics.MetricsException; /** * A null context which has a thread calling diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java index fa2c09217f..d94c8ab46e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/OutputRecord.java @@ -21,11 +21,7 @@ package org.apache.hadoop.metrics.spi; import java.util.Collections; -import java.util.Map; import java.util.Set; -import java.util.TreeMap; -import java.util.Map.Entry; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java index 166a846fdf..32aa431991 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/Util.java @@ -22,7 +22,6 @@ package 
org.apache.hadoop.metrics.spi; import java.net.InetSocketAddress; -import java.net.SocketAddress; import java.util.ArrayList; import java.util.List; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java index a84ebc9898..f7a173a2fb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java @@ -19,8 +19,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java index 790397fd4b..e68d97e7b7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java @@ -19,8 +19,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java index ad431395eb..7d0e1b2fe1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java @@ -20,8 +20,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java index 73f7ef3011..db09248128 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java @@ -19,8 +19,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.hadoop.util.StringUtils; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java index 230a9f00f0..e4b5580536 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java @@ -24,7 +24,6 @@ import java.net.URLClassLoader; import static java.security.AccessController.*; import java.security.PrivilegedAction; -import java.util.Arrays; import java.util.Iterator; import java.util.Locale; import java.util.Map; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java index 929590f467..df1b008be1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java @@ -18,7 +18,6 @@ package org.apache.hadoop.metrics2.sink; -import java.io.BufferedOutputStream; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java index 852f9132d3..f436a667f9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java @@ -23,7 +23,6 @@ import java.lang.management.MemoryUsage; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import static java.lang.Thread.State.*; import java.lang.management.GarbageCollectorMXBean; import java.util.Map; import java.util.List; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java index b1f0fb230f..b2d803c95f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java @@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java index 0a9ff7301b..73bbd4a631 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java @@ -30,7 +30,6 @@ import java.util.Collections; import java.util.Enumeration; import java.util.LinkedHashSet; -import java.util.Set; import java.util.Vector; import javax.naming.NamingException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java index b3627ea264..046f0e7467 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java @@ -737,6 +737,7 @@ public int countNumOfAvailableNodes(String scope, } /** convert a network tree to a string */ + @Override public String 
toString() { // print the number of racks StringBuilder tree = new StringBuilder(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java index a41a42463c..b8502d016b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java @@ -26,7 +26,6 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java index 62076b191b..ed12b3c6be 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java @@ -33,7 +33,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java index a0b0c3ed0f..46039a5506 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java @@ -50,6 +50,7 @@ private static class Reader extends SocketIOWithTimeout { this.channel = channel; } + @Override int performIO(ByteBuffer buf) throws IOException { return channel.read(buf); } @@ -123,10 +124,12 @@ public int read() throws IOException { return ret; } + @Override public int read(byte[] b, int off, int len) throws IOException { return read(ByteBuffer.wrap(b, off, len)); } + @Override public synchronized void close() throws IOException { /* close the channel since Socket.getInputStream().close() * closes the socket. 
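SocketInputStream above implements the byte[] read in terms of the ByteBuffer read by wrapping the array, and funnels close() to the channel because closing either the stream or the channel tears down the socket. The shape of that adapter, reduced to its essentials; ChannelBackedInput is illustrative:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.ReadableByteChannel;

    class ChannelBackedInput {
      private final ReadableByteChannel channel;

      ChannelBackedInput(ReadableByteChannel channel) {
        this.channel = channel;
      }

      // One real implementation: the byte[] variant just wraps the array.
      int read(byte[] b, int off, int len) throws IOException {
        return channel.read(ByteBuffer.wrap(b, off, len));
      }

      boolean isOpen() {
        return channel.isOpen();
      }

      void close() throws IOException {
        channel.close();   // for a socket channel this closes the socket too
      }
    }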
@@ -146,10 +149,12 @@ public ReadableByteChannel getChannel() { //ReadableByteChannel interface + @Override public boolean isOpen() { return reader.isOpen(); } + @Override public int read(ByteBuffer dst) throws IOException { return reader.doIO(dst, SelectionKey.OP_READ); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java index e7bfadeeda..091c684059 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java @@ -58,6 +58,7 @@ private static class Writer extends SocketIOWithTimeout { this.channel = channel; } + @Override int performIO(ByteBuffer buf) throws IOException { return channel.write(buf); } @@ -98,6 +99,7 @@ public SocketOutputStream(Socket socket, long timeout) this(socket.getChannel(), timeout); } + @Override public void write(int b) throws IOException { /* If we need to, we can optimize this allocation. * probably no need to optimize or encourage single byte writes. @@ -107,6 +109,7 @@ public void write(int b) throws IOException { write(buf, 0, 1); } + @Override public void write(byte[] b, int off, int len) throws IOException { ByteBuffer buf = ByteBuffer.wrap(b, off, len); while (buf.hasRemaining()) { @@ -126,6 +129,7 @@ public void write(byte[] b, int off, int len) throws IOException { } } + @Override public synchronized void close() throws IOException { /* close the channel since Socket.getOutputStream().close() * closes the socket. @@ -145,10 +149,12 @@ public WritableByteChannel getChannel() { //WritableByteChannel interface + @Override public boolean isOpen() { return writer.isOpen(); } + @Override public int write(ByteBuffer src) throws IOException { return writer.doIO(src, SelectionKey.OP_WRITE); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java index 0192aa4cbc..6b84f9d2cf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java @@ -59,14 +59,12 @@ public SocksSocketFactory(Proxy proxy) { this.proxy = proxy; } - /* @inheritDoc */ @Override public Socket createSocket() throws IOException { return new Socket(proxy); } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port) throws IOException { @@ -75,7 +73,6 @@ public Socket createSocket(InetAddress addr, int port) throws IOException { return socket; } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port, InetAddress localHostAddr, int localPort) throws IOException { @@ -86,7 +83,6 @@ public Socket createSocket(InetAddress addr, int port, return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port) throws IOException, UnknownHostException { @@ -96,7 +92,6 @@ public Socket createSocket(String host, int port) throws IOException, return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port, InetAddress localHostAddr, int localPort) throws IOException, @@ -108,13 +103,11 @@ public Socket createSocket(String host, int port, return socket; } - /* @inheritDoc */ @Override public int
hashCode() { return proxy.hashCode(); } - /* @inheritDoc */ @Override public boolean equals(Object obj) { if (this == obj) @@ -132,12 +125,12 @@ public boolean equals(Object obj) { return true; } - /* @inheritDoc */ + @Override public Configuration getConf() { return this.conf; } - /* @inheritDoc */ + @Override public void setConf(Configuration conf) { this.conf = conf; String proxyStr = conf.get("hadoop.socks.server"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java index f4942cef26..ac38819360 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java @@ -42,7 +42,6 @@ public class StandardSocketFactory extends SocketFactory { public StandardSocketFactory() { } - /* @inheritDoc */ @Override public Socket createSocket() throws IOException { /* @@ -63,7 +62,6 @@ public Socket createSocket() throws IOException { return SocketChannel.open().socket(); } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port) throws IOException { @@ -72,7 +70,6 @@ public Socket createSocket(InetAddress addr, int port) throws IOException { return socket; } - /* @inheritDoc */ @Override public Socket createSocket(InetAddress addr, int port, InetAddress localHostAddr, int localPort) throws IOException { @@ -83,7 +80,6 @@ public Socket createSocket(InetAddress addr, int port, return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port) throws IOException, UnknownHostException { @@ -93,7 +89,6 @@ public Socket createSocket(String host, int port) throws IOException, return socket; } - /* @inheritDoc */ @Override public Socket createSocket(String host, int port, InetAddress localHostAddr, int localPort) throws IOException, @@ -105,7 +100,6 @@ public Socket createSocket(String host, int port, return socket; } - /* @inheritDoc */ @Override public boolean equals(Object obj) { if (this == obj) @@ -115,7 +109,6 @@ public boolean equals(Object obj) { return obj.getClass().equals(this.getClass()); } - /* @inheritDoc */ @Override public int hashCode() { return this.getClass().hashCode(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 277432bf14..b245c80969 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -125,6 +125,7 @@ private synchronized void load() { } } + @Override public synchronized List resolve(List names) { if (!initialized) { initialized = true; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java index d4c60e08d6..5c302e55e3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java @@ -41,9 +41,11 @@ static private class BinaryIndex implements Index { private BinaryIndex(int nelems) { this.nelems = nelems; } + @Override 
public boolean done() { return (nelems <= 0); } + @Override public void incr() { nelems--; } @@ -56,6 +58,7 @@ private void setDataInput(DataInput inp) { } private static ThreadLocal bIn = new ThreadLocal() { + @Override protected synchronized Object initialValue() { return new BinaryRecordInput(); } @@ -82,34 +85,42 @@ public BinaryRecordInput(DataInput din) { this.in = din; } + @Override public byte readByte(final String tag) throws IOException { return in.readByte(); } + @Override public boolean readBool(final String tag) throws IOException { return in.readBoolean(); } + @Override public int readInt(final String tag) throws IOException { return Utils.readVInt(in); } + @Override public long readLong(final String tag) throws IOException { return Utils.readVLong(in); } + @Override public float readFloat(final String tag) throws IOException { return in.readFloat(); } + @Override public double readDouble(final String tag) throws IOException { return in.readDouble(); } + @Override public String readString(final String tag) throws IOException { return Utils.fromBinaryString(in); } + @Override public Buffer readBuffer(final String tag) throws IOException { final int len = Utils.readVInt(in); final byte[] barr = new byte[len]; @@ -117,26 +128,32 @@ public Buffer readBuffer(final String tag) throws IOException { return new Buffer(barr); } + @Override public void startRecord(final String tag) throws IOException { // no-op } + @Override public void endRecord(final String tag) throws IOException { // no-op } + @Override public Index startVector(final String tag) throws IOException { return new BinaryIndex(readInt(tag)); } + @Override public void endVector(final String tag) throws IOException { // no-op } + @Override public Index startMap(final String tag) throws IOException { return new BinaryIndex(readInt(tag)); } + @Override public void endMap(final String tag) throws IOException { // no-op } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java index 699f635e68..aa6b8e95c5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java @@ -45,6 +45,7 @@ private void setDataOutput(DataOutput out) { } private static ThreadLocal bOut = new ThreadLocal() { + @Override protected synchronized Object initialValue() { return new BinaryRecordOutput(); } @@ -72,34 +73,42 @@ public BinaryRecordOutput(DataOutput out) { } + @Override public void writeByte(byte b, String tag) throws IOException { out.writeByte(b); } + @Override public void writeBool(boolean b, String tag) throws IOException { out.writeBoolean(b); } + @Override public void writeInt(int i, String tag) throws IOException { Utils.writeVInt(out, i); } + @Override public void writeLong(long l, String tag) throws IOException { Utils.writeVLong(out, l); } + @Override public void writeFloat(float f, String tag) throws IOException { out.writeFloat(f); } + @Override public void writeDouble(double d, String tag) throws IOException { out.writeDouble(d); } + @Override public void writeString(String s, String tag) throws IOException { Utils.toBinaryString(out, s); } + @Override public void writeBuffer(Buffer buf, String tag) throws IOException { byte[] barr = buf.get(); @@ -108,20 +117,26 @@ public void writeBuffer(Buffer buf, String tag) 
out.write(barr, 0, len); } + @Override public void startRecord(Record r, String tag) throws IOException {} + @Override public void endRecord(Record r, String tag) throws IOException {} + @Override public void startVector(ArrayList v, String tag) throws IOException { writeInt(v.size(), tag); } + @Override public void endVector(ArrayList v, String tag) throws IOException {} + @Override public void startMap(TreeMap v, String tag) throws IOException { writeInt(v.size(), tag); } + @Override public void endMap(TreeMap v, String tag) throws IOException {} } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java index 5f6630852b..eb569271d2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java @@ -187,6 +187,7 @@ public void append(byte[] bytes) { } // inherit javadoc + @Override public int hashCode() { int hash = 1; byte[] b = this.get(); @@ -202,6 +203,7 @@ public int hashCode() { * @return Positive if this is bigger than other, 0 if they are equal, and * negative if this is smaller than other. */ + @Override public int compareTo(Object other) { Buffer right = ((Buffer) other); byte[] lb = this.get(); @@ -217,6 +219,7 @@ public int compareTo(Object other) { } // inherit javadoc + @Override public boolean equals(Object other) { if (other instanceof Buffer && this != other) { return compareTo(other) == 0; @@ -225,6 +228,7 @@ public boolean equals(Object other) { } // inherit javadoc + @Override public String toString() { StringBuilder sb = new StringBuilder(2*count); for(int idx = 0; idx < count; idx++) { @@ -245,6 +249,7 @@ public String toString(String charsetName) } // inherit javadoc + @Override public Object clone() throws CloneNotSupportedException { Buffer result = (Buffer) super.clone(); result.copy(this.get(), 0, this.getCount()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java index 44093573c9..e9fa0c35b8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java @@ -38,6 +38,7 @@ public class CsvRecordInput implements RecordInput { private PushbackReader stream; private class CsvIndex implements Index { + @Override public boolean done() { char c = '\0'; try { @@ -47,6 +48,7 @@ public boolean done() { } return (c == '}') ? true : false; } + @Override public void incr() {} } @@ -85,19 +87,23 @@ public CsvRecordInput(InputStream in) { } } + @Override public byte readByte(String tag) throws IOException { return (byte) readLong(tag); } + @Override public boolean readBool(String tag) throws IOException { String sval = readField(tag); return "T".equals(sval) ?
true : false; } + @Override public int readInt(String tag) throws IOException { return (int) readLong(tag); } + @Override public long readLong(String tag) throws IOException { String sval = readField(tag); try { @@ -108,10 +114,12 @@ public long readLong(String tag) throws IOException { } } + @Override public float readFloat(String tag) throws IOException { return (float) readDouble(tag); } + @Override public double readDouble(String tag) throws IOException { String sval = readField(tag); try { @@ -122,16 +130,19 @@ public double readDouble(String tag) throws IOException { } } + @Override public String readString(String tag) throws IOException { String sval = readField(tag); return Utils.fromCSVString(sval); } + @Override public Buffer readBuffer(String tag) throws IOException { String sval = readField(tag); return Utils.fromCSVBuffer(sval); } + @Override public void startRecord(String tag) throws IOException { if (tag != null && !"".equals(tag)) { char c1 = (char) stream.read(); @@ -142,6 +153,7 @@ public void startRecord(String tag) throws IOException { } } + @Override public void endRecord(String tag) throws IOException { char c = (char) stream.read(); if (tag == null || "".equals(tag)) { @@ -163,6 +175,7 @@ public void endRecord(String tag) throws IOException { return; } + @Override public Index startVector(String tag) throws IOException { char c1 = (char) stream.read(); char c2 = (char) stream.read(); @@ -172,6 +185,7 @@ public Index startVector(String tag) throws IOException { return new CsvIndex(); } + @Override public void endVector(String tag) throws IOException { char c = (char) stream.read(); if (c != '}') { @@ -184,6 +198,7 @@ public void endVector(String tag) throws IOException { return; } + @Override public Index startMap(String tag) throws IOException { char c1 = (char) stream.read(); char c2 = (char) stream.read(); @@ -193,6 +208,7 @@ public Index startMap(String tag) throws IOException { return new CsvIndex(); } + @Override public void endMap(String tag) throws IOException { char c = (char) stream.read(); if (c != '}') { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java index a638d0bcc9..d770f47cf5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java @@ -61,10 +61,12 @@ public CsvRecordOutput(OutputStream out) { } } + @Override public void writeByte(byte b, String tag) throws IOException { writeLong((long)b, tag); } + @Override public void writeBool(boolean b, String tag) throws IOException { printCommaUnlessFirst(); String val = b ? 
"T" : "F"; @@ -72,32 +74,38 @@ public void writeBool(boolean b, String tag) throws IOException { throwExceptionOnError(tag); } + @Override public void writeInt(int i, String tag) throws IOException { writeLong((long)i, tag); } + @Override public void writeLong(long l, String tag) throws IOException { printCommaUnlessFirst(); stream.print(l); throwExceptionOnError(tag); } + @Override public void writeFloat(float f, String tag) throws IOException { writeDouble((double)f, tag); } + @Override public void writeDouble(double d, String tag) throws IOException { printCommaUnlessFirst(); stream.print(d); throwExceptionOnError(tag); } + @Override public void writeString(String s, String tag) throws IOException { printCommaUnlessFirst(); stream.print(Utils.toCSVString(s)); throwExceptionOnError(tag); } + @Override public void writeBuffer(Buffer buf, String tag) throws IOException { printCommaUnlessFirst(); @@ -105,6 +113,7 @@ public void writeBuffer(Buffer buf, String tag) throwExceptionOnError(tag); } + @Override public void startRecord(Record r, String tag) throws IOException { if (tag != null && !"".equals(tag)) { printCommaUnlessFirst(); @@ -113,6 +122,7 @@ public void startRecord(Record r, String tag) throws IOException { } } + @Override public void endRecord(Record r, String tag) throws IOException { if (tag == null || "".equals(tag)) { stream.print("\n"); @@ -123,23 +133,27 @@ public void endRecord(Record r, String tag) throws IOException { } } + @Override public void startVector(ArrayList v, String tag) throws IOException { printCommaUnlessFirst(); stream.print("v{"); isFirst = true; } + @Override public void endVector(ArrayList v, String tag) throws IOException { stream.print("}"); isFirst = false; } + @Override public void startMap(TreeMap v, String tag) throws IOException { printCommaUnlessFirst(); stream.print("m{"); isFirst = true; } + @Override public void endMap(TreeMap v, String tag) throws IOException { stream.print("}"); isFirst = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java index df0b6c2c85..f0ec99ad81 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java @@ -54,6 +54,7 @@ public abstract void deserialize(RecordInput rin, String tag) throws IOException; // inheric javadoc + @Override public abstract int compareTo (final Object peer) throws ClassCastException; /** @@ -73,18 +74,21 @@ public void deserialize(RecordInput rin) throws IOException { } // inherit javadoc + @Override public void write(final DataOutput out) throws java.io.IOException { BinaryRecordOutput bout = BinaryRecordOutput.get(out); this.serialize(bout); } // inherit javadoc + @Override public void readFields(final DataInput din) throws java.io.IOException { BinaryRecordInput rin = BinaryRecordInput.get(din); this.deserialize(rin); } // inherit javadoc + @Override public String toString() { try { ByteArrayOutputStream s = new ByteArrayOutputStream(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java index 2c86804c14..805d93160a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java @@ -40,6 +40,7 @@ protected RecordComparator(Class recordClass) { } // inheric JavaDoc + @Override public abstract int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2); /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java index 5e77a4552b..871e04bff0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java @@ -63,10 +63,13 @@ private XMLParser(ArrayList vlist) { valList = vlist; } + @Override public void startDocument() throws SAXException {} + @Override public void endDocument() throws SAXException {} + @Override public void startElement(String ns, String sname, String qname, @@ -88,6 +91,7 @@ public void startElement(String ns, } } + @Override public void endElement(String ns, String sname, String qname) throws SAXException { @@ -98,6 +102,7 @@ public void endElement(String ns, } } + @Override public void characters(char buf[], int offset, int len) throws SAXException { if (charsValid) { @@ -109,6 +114,7 @@ public void characters(char buf[], int offset, int len) } private class XmlIndex implements Index { + @Override public boolean done() { Value v = valList.get(vIdx); if ("/array".equals(v.getType())) { @@ -119,6 +125,7 @@ public boolean done() { return false; } } + @Override public void incr() {} } @@ -152,6 +159,7 @@ public XmlRecordInput(InputStream in) { } } + @Override public byte readByte(String tag) throws IOException { Value v = next(); if (!"ex:i1".equals(v.getType())) { @@ -160,6 +168,7 @@ public byte readByte(String tag) throws IOException { return Byte.parseByte(v.getValue()); } + @Override public boolean readBool(String tag) throws IOException { Value v = next(); if (!"boolean".equals(v.getType())) { @@ -168,6 +177,7 @@ public boolean readBool(String tag) throws IOException { return "1".equals(v.getValue()); } + @Override public int readInt(String tag) throws IOException { Value v = next(); if (!"i4".equals(v.getType()) && @@ -177,6 +187,7 @@ public int readInt(String tag) throws IOException { return Integer.parseInt(v.getValue()); } + @Override public long readLong(String tag) throws IOException { Value v = next(); if (!"ex:i8".equals(v.getType())) { @@ -185,6 +196,7 @@ public long readLong(String tag) throws IOException { return Long.parseLong(v.getValue()); } + @Override public float readFloat(String tag) throws IOException { Value v = next(); if (!"ex:float".equals(v.getType())) { @@ -193,6 +205,7 @@ public float readFloat(String tag) throws IOException { return Float.parseFloat(v.getValue()); } + @Override public double readDouble(String tag) throws IOException { Value v = next(); if (!"double".equals(v.getType())) { @@ -201,6 +214,7 @@ public double readDouble(String tag) throws IOException { return Double.parseDouble(v.getValue()); } + @Override public String readString(String tag) throws IOException { Value v = next(); if (!"string".equals(v.getType())) { @@ -209,6 +223,7 @@ public String readString(String tag) throws IOException { return Utils.fromXMLString(v.getValue()); } + @Override public Buffer readBuffer(String tag) throws IOException { Value v = next(); if (!"string".equals(v.getType())) { @@ -217,6 +232,7 @@ public Buffer readBuffer(String tag) 
throws IOException { return Utils.fromXMLBuffer(v.getValue()); } + @Override public void startRecord(String tag) throws IOException { Value v = next(); if (!"struct".equals(v.getType())) { @@ -224,6 +240,7 @@ public void startRecord(String tag) throws IOException { } } + @Override public void endRecord(String tag) throws IOException { Value v = next(); if (!"/struct".equals(v.getType())) { @@ -231,6 +248,7 @@ public void endRecord(String tag) throws IOException { } } + @Override public Index startVector(String tag) throws IOException { Value v = next(); if (!"array".equals(v.getType())) { @@ -239,12 +257,15 @@ public Index startVector(String tag) throws IOException { return new XmlIndex(); } + @Override public void endVector(String tag) throws IOException {} + @Override public Index startMap(String tag) throws IOException { return startVector(tag); } + @Override public void endMap(String tag) throws IOException { endVector(tag); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java index adf28a2fdf..9cf85537ed 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java @@ -149,6 +149,7 @@ public XmlRecordOutput(OutputStream out) { } } + @Override public void writeByte(byte b, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -157,6 +158,7 @@ public void writeByte(byte b, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeBool(boolean b, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -165,6 +167,7 @@ public void writeBool(boolean b, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeInt(int i, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -173,6 +176,7 @@ public void writeInt(int i, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeLong(long l, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -181,6 +185,7 @@ public void writeLong(long l, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeFloat(float f, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -189,6 +194,7 @@ public void writeFloat(float f, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeDouble(double d, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -197,6 +203,7 @@ public void writeDouble(double d, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeString(String s, String tag) throws IOException { printBeginEnvelope(tag); stream.print(""); @@ -205,6 +212,7 @@ public void writeString(String s, String tag) throws IOException { printEndEnvelope(tag); } + @Override public void writeBuffer(Buffer buf, String tag) throws IOException { printBeginEnvelope(tag); @@ -214,12 +222,14 @@ public void writeBuffer(Buffer buf, String tag) printEndEnvelope(tag); } + @Override public void startRecord(Record r, String tag) throws IOException { insideRecord(tag); stream.print("\n"); addIndent(); } + @Override public void endRecord(Record r, String tag) throws IOException { closeIndent(); putIndent(); @@ -227,12 +237,14 @@ public void endRecord(Record 
r, String tag) throws IOException { outsideRecord(tag); } + @Override public void startVector(ArrayList v, String tag) throws IOException { insideVector(tag); stream.print("\n"); addIndent(); } + @Override public void endVector(ArrayList v, String tag) throws IOException { closeIndent(); putIndent(); @@ -240,12 +252,14 @@ public void endVector(ArrayList v, String tag) throws IOException { outsideVector(tag); } + @Override public void startMap(TreeMap v, String tag) throws IOException { insideMap(tag); stream.print("\n"); addIndent(); } + @Override public void endMap(TreeMap v, String tag) throws IOException { closeIndent(); putIndent(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java index b62b62924b..69ab37a152 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java @@ -37,6 +37,7 @@ class CGenerator extends CodeGenerator { * and spits-out file-level elements (such as include statements etc.) * record-level code is generated by JRecord. */ + @Override void genCode(String name, ArrayList ilist, ArrayList rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java index a18871c74a..ec4d5df981 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java @@ -98,6 +98,7 @@ private void rawAppend(char ch) { sb.append(ch); } + @Override public String toString() { return sb.toString(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java index 7f3e8d74df..2a186fa283 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java @@ -18,12 +18,8 @@ package org.apache.hadoop.record.compiler; -import java.io.IOException; -import java.util.Iterator; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.record.RecordInput; /** * const definitions for Record I/O compiler diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java index e1fb599c04..1c97a48d53 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java @@ -37,6 +37,7 @@ class CppGenerator extends CodeGenerator { * and spits-out file-level elements (such as include statements etc.) * record-level code is generated by JRecord. 
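An aside on the record I/O hunks just covered: the CsvRecordOutput writers and CsvRecordInput readers annotated above pair up one-for-one, so the annotations can be sanity-checked with an in-memory round trip. A minimal sketch built only from calls visible in these hunks (the class name and sample values are mine; recall this whole package is deprecated in favor of Avro):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.record.CsvRecordInput;
import org.apache.hadoop.record.CsvRecordOutput;

public class CsvRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    CsvRecordOutput out = new CsvRecordOutput(sink);
    out.writeBool(true, "flag");      // serialized as "T"
    out.writeInt(42, "count");        // delegates to writeLong
    out.writeString("hello", "msg");  // CSV-escaped via Utils.toCSVString
    out.endRecord(null, "");          // empty tag just terminates the line

    CsvRecordInput in = new CsvRecordInput(
        new ByteArrayInputStream(sink.toByteArray()));
    System.out.println(in.readBool("flag"));   // true
    System.out.println(in.readInt("count"));   // 42
    System.out.println(in.readString("msg"));  // hello
  }
}
```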
*/ + @Override void genCode(String name, ArrayList ilist, ArrayList rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java index 77791f2723..95dc7334f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java @@ -36,20 +36,24 @@ class JavaBoolean extends JType.JavaType { super("boolean", "Bool", "Boolean", "TypeID.RIOType.BOOL"); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 : ("+ fname+"?1:-1);\n"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.BoolTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = ("+fname+")?0:1;\n"); } // In Binary format, boolean is written as byte. true = 1, false = 0 + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<1) {\n"); @@ -61,6 +65,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { } // In Binary format, boolean is written as byte. true = 1, false = 0 + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<1 || l2<1) {\n"); @@ -81,6 +86,7 @@ class CppBoolean extends CppType { super("bool"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BOOL)"; } @@ -93,6 +99,7 @@ public JBoolean() { setCType(new CType()); } + @Override String getSignature() { return "z"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java index 53b8264e3e..7c4a670607 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java @@ -39,22 +39,27 @@ class JavaBuffer extends JavaCompType { "org.apache.hadoop.record.Buffer", "TypeID.RIOType.BUFFER"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.BufferTypeID"; } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n"); } + @Override void genEquals(CodeBuffer cb, String fname, String peer) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n"); } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+ @@ -64,6 +69,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -84,6 +90,7 @@ class CppBuffer extends CppCompType { super(" ::std::string"); } + @Override void genGetSet(CodeBuffer cb, String fname) { cb.append("virtual const "+getType()+"& 
get"+toCamelCase(fname)+"() const {\n"); cb.append("return "+fname+";\n"); @@ -93,6 +100,7 @@ void genGetSet(CodeBuffer cb, String fname) { cb.append("}\n"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BUFFER)"; } @@ -105,6 +113,7 @@ public JBuffer() { setCType(new CCompType()); } + @Override String getSignature() { return "B"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java index 96f191d5fb..1ac7171f24 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java @@ -37,10 +37,12 @@ class JavaByte extends JavaType { super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.ByteTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<1) {\n"); @@ -51,6 +53,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<1 || l2<1) {\n"); @@ -71,6 +74,7 @@ class CppByte extends CppType { super("int8_t"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)"; } @@ -82,6 +86,7 @@ public JByte() { setCType(new CType()); } + @Override String getSignature() { return "b"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java index f71d97d50c..5f41aeb56f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java @@ -35,18 +35,22 @@ abstract class JavaCompType extends JavaType { super(type, suffix, wrapper, typeIDByteString); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n"); } + @Override void genEquals(CodeBuffer cb, String fname, String peer) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n"); } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n"); } + @Override void genClone(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+ fname+".clone();\n"); @@ -59,6 +63,7 @@ abstract class CppCompType extends CppType { super(type); } + @Override void genGetSet(CodeBuffer cb, String fname) { cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n"); cb.append("return "+fname+";\n"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java index cdab26341d..5e1a65fc07 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java @@ -36,15 +36,18 @@ class JavaDouble extends 
JavaType { super("double", "Double", "Double", "TypeID.RIOType.DOUBLE"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { String tmp = "Double.doubleToLongBits("+fname+")"; cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<8) {\n"); @@ -55,6 +58,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<8 || l2<8) {\n"); @@ -77,6 +81,7 @@ class CppDouble extends CppType { super("double"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)"; } @@ -90,6 +95,7 @@ public JDouble() { setCType(new CType()); } + @Override String getSignature() { return "d"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java index 1081651be6..10aa69ad2f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java @@ -35,14 +35,17 @@ class JavaFloat extends JavaType { super("float", "Float", "Float", "TypeID.RIOType.FLOAT"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.FloatTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("if ("+l+"<4) {\n"); @@ -53,6 +56,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("if (l1<4 || l2<4) {\n"); @@ -75,6 +79,7 @@ class CppFloat extends CppType { super("float"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)"; } @@ -87,6 +92,7 @@ public JFloat() { setCType(new CType()); } + @Override String getSignature() { return "f"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java index b1303e44bd..d18445ff55 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java @@ -38,10 +38,12 @@ class JavaInt extends JavaType { super("int", "Int", "Integer", "TypeID.RIOType.INT"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.IntTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n"); @@ -50,6 +52,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -70,6 +73,7 @@ class CppInt extends CppType { super("int32_t"); } 
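The genSlurpBytes/genCompareBytes methods picking up @Override here don't compare anything themselves; they emit Java source for raw-byte comparators. What the generated code amounts to for one vint-encoded int field, lifted into a standalone helper (class and method names are mine):

```java
import java.io.IOException;
import org.apache.hadoop.record.Utils;

public class RawIntCompare {
  // Mirrors the body JavaInt.genCompareBytes() emits: decode both
  // vint-encoded values and order them without materializing records.
  static int compareIntField(byte[] b1, int s1, byte[] b2, int s2)
      throws IOException {
    int i1 = Utils.readVInt(b1, s1);
    int i2 = Utils.readVInt(b2, s2);
    return (i1 < i2) ? -1 : ((i1 == i2) ? 0 : 1);
  }
}
```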
+ @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_INT)"; } @@ -82,6 +86,7 @@ public JInt() { setCType(new CType()); } + @Override String getSignature() { return "i"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java index ca09f053b4..f540fc808d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java @@ -37,15 +37,18 @@ class JavaLong extends JavaType { super("long", "Long", "Long", "TypeID.RIOType.LONG"); } + @Override String getTypeIDObjectString() { return "org.apache.hadoop.record.meta.TypeID.LongTypeID"; } + @Override void genHashCode(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+ fname+">>>32));\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("long i = org.apache.hadoop.record.Utils.readVLong("+b+", "+s+");\n"); @@ -54,6 +57,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);\n"); @@ -74,6 +78,7 @@ class CppLong extends CppType { super("int64_t"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)"; } @@ -86,6 +91,7 @@ public JLong() { setCType(new CType()); } + @Override String getSignature() { return "l"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java index 4758accb51..03dcad322e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JMap.java @@ -59,17 +59,20 @@ class JavaMap extends JavaCompType { this.value = value; } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.MapTypeID(" + key.getTypeIDObjectString() + ", " + value.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { key.genSetRTIFilter(cb, nestedStructMap); value.genSetRTIFilter(cb, nestedStructMap); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { String setType = "java.util.Set<"+key.getWrapperType()+"> "; String iterType = "java.util.Iterator<"+key.getWrapperType()+"> "; @@ -98,6 +101,7 @@ void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(getType()+" "+fname+";\n"); @@ -122,6 +126,7 @@ void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { cb.append("}\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { String setType = "java.util.Set> "; @@ -153,6 +158,7 @@ void genWriteMethod(CodeBuffer cb, String fname, String tag) { decrLevel(); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); incrLevel(); @@ -170,6 +176,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + 
@Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); incrLevel(); @@ -208,12 +215,14 @@ class CppMap extends CppCompType { this.value = value; } + @Override String getTypeIDObjectString() { return "new ::hadoop::MapTypeID(" + key.getTypeIDObjectString() + ", " + value.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb) { key.genSetRTIFilter(cb); value.genSetRTIFilter(cb); @@ -230,6 +239,7 @@ public JMap(JType t1, JType t2) { valueType = t2; } + @Override String getSignature() { return "{" + keyType.getSignature() + valueType.getSignature() +"}"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java index 647d3a7baa..80e545ba3a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JRecord.java @@ -54,11 +54,13 @@ class JavaRecord extends JavaCompType { } } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.StructTypeID(" + fullName + ".getTypeInfo())"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { // ignore, if we'ev already set the type filter for this record if (!nestedStructMap.containsKey(fullName)) { @@ -129,6 +131,7 @@ void genSetupRtiFields(CodeBuffer cb) { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(fullName+" "+fname+";\n"); @@ -137,10 +140,12 @@ void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n"); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int r = "+fullName+ @@ -149,6 +154,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int r1 = "+fullName+ @@ -492,6 +498,7 @@ class CppRecord extends CppCompType { } } + @Override String getTypeIDObjectString() { return "new ::hadoop::StructTypeID(" + fullName + "::getTypeInfo().getFieldTypeInfos())"; @@ -501,6 +508,7 @@ String genDecl(String fname) { return " "+name+" "+fname+";\n"; } + @Override void genSetRTIFilter(CodeBuffer cb) { // we set the RTI filter here cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+ @@ -797,6 +805,7 @@ public JRecord(String name, ArrayList> flist) { signature = sb.toString(); } + @Override String getSignature() { return signature; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java index 5c712e963c..cd3ab3dc35 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JString.java @@ -36,10 +36,12 @@ class JavaString extends JavaCompType { super("String", "String", "String", "TypeID.RIOType.STRING"); } + @Override String getTypeIDObjectString() { return 
"org.apache.hadoop.record.meta.TypeID.StringTypeID"; } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n"); @@ -48,6 +50,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); @@ -61,6 +64,7 @@ void genCompareBytes(CodeBuffer cb) { cb.append("}\n"); } + @Override void genClone(CodeBuffer cb, String fname) { cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n"); } @@ -72,6 +76,7 @@ class CppString extends CppCompType { super("::std::string"); } + @Override String getTypeIDObjectString() { return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)"; } @@ -84,6 +89,7 @@ public JString() { setCType(new CCompType()); } + @Override String getSignature() { return "s"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java index 8bce5cc91f..46ecbada51 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JVector.java @@ -54,15 +54,18 @@ class JavaVector extends JavaCompType { element = t; } + @Override String getTypeIDObjectString() { return "new org.apache.hadoop.record.meta.VectorTypeID(" + element.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb, Map nestedStructMap) { element.genSetRTIFilter(cb, nestedStructMap); } + @Override void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append("{\n"); incrLevel(); @@ -92,6 +95,7 @@ void genCompareTo(CodeBuffer cb, String fname, String other) { cb.append("}\n"); } + @Override void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { if (decl) { cb.append(getType()+" "+fname+";\n"); @@ -113,6 +117,7 @@ void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) { cb.append("}\n"); } + @Override void genWriteMethod(CodeBuffer cb, String fname, String tag) { cb.append("{\n"); incrLevel(); @@ -131,6 +136,7 @@ void genWriteMethod(CodeBuffer cb, String fname, String tag) { decrLevel(); } + @Override void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); incrLevel(); @@ -146,6 +152,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("}\n"); } + @Override void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); incrLevel(); @@ -179,11 +186,13 @@ class CppVector extends CppCompType { element = t; } + @Override String getTypeIDObjectString() { return "new ::hadoop::VectorTypeID(" + element.getTypeIDObjectString() + ")"; } + @Override void genSetRTIFilter(CodeBuffer cb) { element.genSetRTIFilter(cb); } @@ -198,6 +207,7 @@ public JVector(JType t) { setCType(new CCompType()); } + @Override String getSignature() { return "[" + type.getSignature() + "]"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java index 04c4bd8473..6d51df6cd1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JavaGenerator.java @@ -39,6 +39,7 @@ class JavaGenerator extends CodeGenerator { * @param rlist List of records defined within this file * @param destDir output directory */ + @Override void genCode(String name, ArrayList ilist, ArrayList rlist, String destDir, ArrayList options) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java index 5f999ecb88..869e0594f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/ant/RccTask.java @@ -110,6 +110,7 @@ public void addFileset(FileSet set) { /** * Invoke the Hadoop record compiler on each record definition file */ + @Override public void execute() throws BuildException { if (src == null && filesets.size()==0) { throw new BuildException("There must be a file attribute or a fileset child element"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java index 3d4a82bac6..3af5910ccb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/ParseException.java @@ -120,6 +120,7 @@ public ParseException(String message) { * of the final stack trace, and hence the correct error message * gets displayed. 
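RccTask.execute(), annotated a few hunks up, ultimately forwards the record DDL files to the rcc driver. A programmatic equivalent, heavily hedged: the --language/--destdir flags and the .jr file name are assumptions based on rcc's conventional usage, not anything this patch shows:

```java
import org.apache.hadoop.record.compiler.generated.Rcc;

public class RunRcc {
  public static void main(String[] args) {
    int exit = Rcc.driver(new String[] {
        "--language", "java",
        "--destdir", "build/generated-sources",
        "mytypes.jr"   // hypothetical record DDL file
    });
    if (exit != 0) {
      throw new RuntimeException("rcc failed with exit code " + exit);
    }
  }
}
```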
*/ + @Override public String getMessage() { if (!specialConstructor) { return super.getMessage(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java index fcac0997d6..c4c74cd651 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Rcc.java @@ -24,7 +24,6 @@ import org.apache.hadoop.record.compiler.*; import java.util.ArrayList; import java.util.Hashtable; -import java.util.Iterator; import java.io.File; import java.io.FileReader; import java.io.FileNotFoundException; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java index 72acd13f74..7488606fe9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/RccTokenManager.java @@ -20,14 +20,6 @@ package org.apache.hadoop.record.compiler.generated; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.record.compiler.*; -import java.util.ArrayList; -import java.util.Hashtable; -import java.util.Iterator; -import java.io.File; -import java.io.FileReader; -import java.io.FileNotFoundException; -import java.io.IOException; /** * @deprecated Replaced by Avro. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java index 37df5b97e0..1396bf899b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/Token.java @@ -78,6 +78,7 @@ public class Token { /** * Returns the image. */ + @Override public String toString() { return image; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java index 4b0712e82f..b6da7dadcd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java @@ -138,6 +138,7 @@ protected static String LexicalError(boolean EOFSeen, int lexState, int errorLin * * from this method for such cases in the release version of your parser. 
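The equals/hashCode pairs annotated in the record.meta hunks below are exactly where @Override earns its keep: a slip like equals(FieldTypeInfo) compiles cleanly as an overload, and hash-based collections then fall back to identity comparison. A hypothetical illustration reusing the 37*17 hashing scheme these classes share:

```java
import java.util.HashSet;
import java.util.Set;

public class OverrideEqualsDemo {
  static final class TypeTag {
    private final byte typeVal;
    TypeTag(byte typeVal) { this.typeVal = typeVal; }

    // @Override makes the compiler verify this really replaces
    // Object.equals; equals(TypeTag) would be silently ignored by HashSet.
    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof TypeTag)) return false;
      return typeVal == ((TypeTag) o).typeVal;
    }

    @Override
    public int hashCode() { return 37 * 17 + typeVal; }
  }

  public static void main(String[] args) {
    Set<TypeTag> tags = new HashSet<>();
    tags.add(new TypeTag((byte) 1));
    System.out.println(tags.contains(new TypeTag((byte) 1)));  // true
  }
}
```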
*/ + @Override public String getMessage() { return super.getMessage(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java index f7f4fb0d02..32436abf82 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/FieldTypeInfo.java @@ -69,6 +69,7 @@ void write(RecordOutput rout, String tag) throws IOException { /** * Two FieldTypeInfos are equal if ach of their fields matches */ + @Override public boolean equals(Object o) { if (this == o) return true; @@ -87,6 +88,7 @@ public boolean equals(Object o) { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java index 3a83d0896c..f9c5320cfb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/MapTypeID.java @@ -19,8 +19,6 @@ package org.apache.hadoop.record.meta; import java.io.IOException; -import java.util.*; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.record.RecordOutput; @@ -58,6 +56,7 @@ public TypeID getValueTypeID() { return this.typeIDValue; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); typeIDKey.write(rout, tag); @@ -68,6 +67,7 @@ void write(RecordOutput rout, String tag) throws IOException { * Two map typeIDs are equal if their constituent elements have the * same type */ + @Override public boolean equals(Object o) { if (!super.equals(o)) return false; @@ -82,6 +82,7 @@ public boolean equals(Object o) { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java index 3bd153cdc3..8a9d0b5fbb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/RecordTypeInfo.java @@ -122,6 +122,7 @@ public RecordTypeInfo getNestedStructTypeInfo(String name) { /** * Serialize the type information for a record */ + @Override public void serialize(RecordOutput rout, String tag) throws IOException { // write out any header, version info, here rout.startRecord(this, tag); @@ -133,6 +134,7 @@ public void serialize(RecordOutput rout, String tag) throws IOException { /** * Deserialize the type information for a record */ + @Override public void deserialize(RecordInput rin, String tag) throws IOException { // read in any header, version info rin.startRecord(tag); @@ -148,6 +150,7 @@ public void deserialize(RecordInput rin, 
String tag) throws IOException { * So we always throw an exception. * Not implemented. Always returns 0 if another RecordTypeInfo is passed in. */ + @Override public int compareTo (final Object peer_) throws ClassCastException { if (!(peer_ instanceof RecordTypeInfo)) { throw new ClassCastException("Comparing different types of records."); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java index b7d19ea815..d2c9ccdc75 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/StructTypeID.java @@ -72,6 +72,7 @@ StructTypeID findStruct(String name) { return null; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); writeRest(rout, tag); @@ -155,9 +156,11 @@ private TypeID genericReadTypeID(RecordInput rin, String tag) throws IOException } } + @Override public boolean equals(Object o) { return super.equals(o); } + @Override public int hashCode() { return super.hashCode(); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java index ea2e35eb79..5a76eb4bd1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/TypeID.java @@ -89,6 +89,7 @@ void write(RecordOutput rout, String tag) throws IOException { /** * Two base typeIDs are equal if they refer to the same type */ + @Override public boolean equals(Object o) { if (this == o) return true; @@ -107,6 +108,7 @@ public boolean equals(Object o) { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { // See 'Effectve Java' by Joshua Bloch return 37*17+(int)typeVal; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java index 88f820b8b8..22ab07efdc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/meta/VectorTypeID.java @@ -43,6 +43,7 @@ public TypeID getElementTypeID() { return this.typeIDElement; } + @Override void write(RecordOutput rout, String tag) throws IOException { rout.writeByte(typeVal, tag); typeIDElement.write(rout, tag); @@ -52,6 +53,7 @@ void write(RecordOutput rout, String tag) throws IOException { * Two vector typeIDs are equal if their constituent elements have the * same type */ + @Override public boolean equals(Object o) { if (!super.equals (o)) return false; @@ -64,6 +66,7 @@ public boolean equals(Object o) { * We use a basic hashcode implementation, since this class will likely not * be used as a hashmap key */ + @Override public int hashCode() { return 37*17+typeIDElement.hashCode(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java index 
0e1c0864f5..b72e3ed6df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/RefreshUserMappingsProtocol.java @@ -22,7 +22,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.VersionedProtocol; import org.apache.hadoop.security.KerberosInfo; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java index fa82664bdd..7ee452316a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java @@ -189,6 +189,7 @@ public SaslInputStream(InputStream inStream, SaslClient saslClient) { * @exception IOException * if an I/O error occurs. */ + @Override public int read() throws IOException { if (!useWrap) { return inStream.read(); @@ -220,6 +221,7 @@ public int read() throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public int read(byte[] b) throws IOException { return read(b, 0, b.length); } @@ -242,6 +244,7 @@ public int read(byte[] b) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public int read(byte[] b, int off, int len) throws IOException { if (!useWrap) { return inStream.read(b, off, len); @@ -286,6 +289,7 @@ public int read(byte[] b, int off, int len) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public long skip(long n) throws IOException { if (!useWrap) { return inStream.skip(n); @@ -312,6 +316,7 @@ public long skip(long n) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public int available() throws IOException { if (!useWrap) { return inStream.available(); @@ -329,6 +334,7 @@ public int available() throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public void close() throws IOException { disposeSasl(); ostart = 0; @@ -344,6 +350,7 @@ public void close() throws IOException { * @return false, since this class does not support the * mark and reset methods. */ + @Override public boolean markSupported() { return false; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java index 4a0f3cb42c..494ba1e7a4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslOutputStream.java @@ -19,9 +19,7 @@ package org.apache.hadoop.security; import java.io.BufferedOutputStream; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -122,6 +120,7 @@ private void disposeSasl() throws SaslException { * @exception IOException * if an I/O error occurs. 
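SaslInputStream's read(byte[]) above just forwards to read(byte[], int, int): standard stream-decorator layering, where @Override documents which JDK method each variant replaces. The same pattern in a self-contained, JDK-only sketch (the counting wrapper is hypothetical):

```java
import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

public class CountingInputStream extends FilterInputStream {
  private long bytesRead;

  public CountingInputStream(InputStream in) { super(in); }

  @Override
  public int read() throws IOException {
    int b = super.read();
    if (b >= 0) bytesRead++;
    return b;
  }

  // FilterInputStream.read(byte[]) forwards here, so one override
  // covers both array variants -- the same layering SaslInputStream uses.
  @Override
  public int read(byte[] buf, int off, int len) throws IOException {
    int n = super.read(buf, off, len);
    if (n > 0) bytesRead += n;
    return n;
  }

  public long getBytesRead() { return bytesRead; }

  public static void main(String[] args) throws IOException {
    CountingInputStream in =
        new CountingInputStream(new ByteArrayInputStream(new byte[16]));
    while (in.read() != -1) { }
    System.out.println(in.getBytesRead());  // 16
  }
}
```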
*/ + @Override public void write(int b) throws IOException { if (!useWrap) { outStream.write(b); @@ -146,6 +145,7 @@ public void write(int b) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public void write(byte[] b) throws IOException { write(b, 0, b.length); } @@ -163,6 +163,7 @@ public void write(byte[] b) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public void write(byte[] inBuf, int off, int len) throws IOException { if (!useWrap) { outStream.write(inBuf, off, len); @@ -197,6 +198,7 @@ public void write(byte[] inBuf, int off, int len) throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public void flush() throws IOException { outStream.flush(); } @@ -208,6 +210,7 @@ public void flush() throws IOException { * @exception IOException * if an I/O error occurs. */ + @Override public void close() throws IOException { disposeSasl(); outStream.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index 54b1502acc..98b3f5db29 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -239,6 +239,7 @@ public SaslClientCallbackHandler(Token token) { this.userPassword = SaslRpcServer.encodePassword(token.getPassword()); } + @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { NameCallback nc = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index b0588c27fd..31718628f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -194,7 +194,6 @@ private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken { return encodePassword(secretManager.retrievePassword(tokenid)); } - /** {@inheritDoc} */ @Override public void handle(Callback[] callbacks) throws InvalidToken, UnsupportedCallbackException { @@ -253,7 +252,6 @@ public void handle(Callback[] callbacks) throws InvalidToken, @InterfaceStability.Evolving public static class SaslGssCallbackHandler implements CallbackHandler { - /** {@inheritDoc} */ @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index 2f65892db7..25bae83b1e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -498,6 +498,7 @@ interface HostResolver { * Uses standard java host resolution */ static class StandardHostResolver implements HostResolver { + @Override public InetAddress getByName(String host) throws UnknownHostException { return InetAddress.getByName(host); } @@ -542,6 +543,7 @@ protected static class QualifiedHostResolver implements HostResolver { * @return InetAddress with the fully 
qualified hostname or ip * @throws UnknownHostException if host does not exist */ + @Override public InetAddress getByName(String host) throws UnknownHostException { InetAddress addr = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java index 1b14927bd7..6335fc7146 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java @@ -20,10 +20,7 @@ import java.io.IOException; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.StringTokenizer; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java index 7e60bed26a..0ee1c60c59 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java @@ -20,12 +20,6 @@ import java.io.IOException; import java.util.LinkedList; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.HashSet; -import java.util.StringTokenizer; -import java.util.concurrent.ConcurrentHashMap; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java index 0d3c482289..184b40d8ed 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java @@ -19,7 +19,6 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; -import java.io.File; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.security.AccessControlContext; @@ -33,7 +32,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -344,6 +342,7 @@ private static class RealUser implements Principal { this.realUser = realUser; } + @Override public String getName() { return realUser.getUserName(); } @@ -700,6 +699,7 @@ private void spawnAutoRenewalThreadForUserCreds() { !isKeytab) { Thread t = new Thread(new Runnable() { + @Override public void run() { String cmd = conf.get("hadoop.kerberos.kinit.command", "kinit"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java index 922d330842..e23612ec0f 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java @@ -48,6 +48,7 @@ public class AccessControlList implements Writable { WritableFactories.setFactory (AccessControlList.class, new WritableFactory() { + @Override public Writable newInstance() { return new AccessControlList(); } }); } @@ -318,6 +319,7 @@ public String getAclString() { /** * Serializes the AccessControlList object */ + @Override public void write(DataOutput out) throws IOException { String aclString = getAclString(); Text.writeString(out, aclString); @@ -326,6 +328,7 @@ public void write(DataOutput out) throws IOException { /** * Deserializes the AccessControlList object */ + @Override public void readFields(DataInput in) throws IOException { String aclString = Text.readString(in); buildACL(aclString); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java index c2176e5989..6b86a05e7a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/PolicyProvider.java @@ -42,6 +42,7 @@ public abstract class PolicyProvider { */ public static final PolicyProvider DEFAULT_POLICY_PROVIDER = new PolicyProvider() { + @Override public Service[] getServices() { return null; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java index 9ab6d68daf..4407a7e8e3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/RefreshAuthorizationPolicyProtocol.java @@ -22,7 +22,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.VersionedProtocol; import org.apache.hadoop.security.KerberosInfo; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index 00dd2021ee..4c17f9fd25 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -124,6 +124,7 @@ public Configuration getConf() { * @throws GeneralSecurityException thrown if the keystores could not be * initialized due to a security error. 
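AccessControlList's write/readFields overrides, annotated above, form an ordinary Writable pair and can be exercised entirely in memory. A minimal sketch (class name and ACL string are mine):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.security.authorize.AccessControlList;

public class AclRoundTrip {
  public static void main(String[] args) throws Exception {
    AccessControlList acl = new AccessControlList("alice,bob admins");

    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    acl.write(new DataOutputStream(sink));        // serializes getAclString()

    AccessControlList copy = new AccessControlList();  // Writable no-arg ctor
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(sink.toByteArray())));
    System.out.println(copy.getAclString());      // same users and groups
  }
}
```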
*/ + @Override public void init(SSLFactory.Mode mode) throws IOException, GeneralSecurityException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java index 3f88fb89a7..c59000ea6a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java @@ -81,6 +81,7 @@ @InterfaceStability.Evolving public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { + @Override boolean verify(String host, SSLSession session); void check(String host, SSLSocket ssl) throws IOException; @@ -125,12 +126,14 @@ void check(String[] hosts, String[] cns, String[] subjectAlts) */ public final static SSLHostnameVerifier DEFAULT = new AbstractVerifier() { + @Override public final void check(final String[] hosts, final String[] cns, final String[] subjectAlts) throws SSLException { check(hosts, cns, subjectAlts, false, false); } + @Override public final String toString() { return "DEFAULT"; } }; @@ -143,6 +146,7 @@ public final void check(final String[] hosts, final String[] cns, */ public final static SSLHostnameVerifier DEFAULT_AND_LOCALHOST = new AbstractVerifier() { + @Override public final void check(final String[] hosts, final String[] cns, final String[] subjectAlts) throws SSLException { @@ -152,6 +156,7 @@ public final void check(final String[] hosts, final String[] cns, check(hosts, cns, subjectAlts, false, false); } + @Override public final String toString() { return "DEFAULT_AND_LOCALHOST"; } }; @@ -173,12 +178,14 @@ public final void check(final String[] hosts, final String[] cns, */ public final static SSLHostnameVerifier STRICT = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) throws SSLException { check(host, cns, subjectAlts, false, true); } + @Override public final String toString() { return "STRICT"; } }; @@ -190,12 +197,14 @@ public final void check(final String[] host, final String[] cns, */ public final static SSLHostnameVerifier STRICT_IE6 = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) throws SSLException { check(host, cns, subjectAlts, true, true); } + @Override public final String toString() { return "STRICT_IE6"; } }; @@ -205,11 +214,13 @@ public final void check(final String[] host, final String[] cns, */ public final static SSLHostnameVerifier ALLOW_ALL = new AbstractVerifier() { + @Override public final void check(final String[] host, final String[] cns, final String[] subjectAlts) { // Allow everything - so never blowup. } + @Override public final String toString() { return "ALLOW_ALL"; } }; @@ -250,6 +261,7 @@ protected AbstractVerifier() {} * @param session SSLSession with the remote server * @return true if the host matched the one in the certificate. 
*/ + @Override public boolean verify(String host, SSLSession session) { try { Certificate[] certs = session.getPeerCertificates(); @@ -262,20 +274,24 @@ public boolean verify(String host, SSLSession session) { } } + @Override public void check(String host, SSLSocket ssl) throws IOException { check(new String[]{host}, ssl); } + @Override public void check(String host, X509Certificate cert) throws SSLException { check(new String[]{host}, cert); } + @Override public void check(String host, String[] cns, String[] subjectAlts) throws SSLException { check(new String[]{host}, cns, subjectAlts); } + @Override public void check(String host[], SSLSocket ssl) throws IOException { if (host == null) { @@ -332,6 +348,7 @@ is presenting (e.g. edit "/etc/apache2/server.crt" check(host, x509); } + @Override public void check(String[] host, X509Certificate cert) throws SSLException { String[] cns = Certificates.getCNs(cert); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index bbddf6fdc7..905c948da7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -195,7 +195,7 @@ public void setService(Text newService) { service = newService; } - /** {@inheritDoc} */ + @Override public void readFields(DataInput in) throws IOException { int len = WritableUtils.readVInt(in); if (identifier == null || identifier.length != len) { @@ -211,7 +211,7 @@ public void readFields(DataInput in) throws IOException { service.readFields(in); } - /** {@inheritDoc} */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, identifier.length); out.write(identifier); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java index b3e367bdf2..6ec3b7e606 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java @@ -85,6 +85,7 @@ public AbstractDelegationTokenIdentifier(Text owner, Text renewer, Text realUser * * @return the username or owner */ + @Override public UserGroupInformation getUser() { if ( (owner == null) || ("".equals(owner.toString()))) { return null; @@ -150,7 +151,7 @@ static boolean isEqual(Object a, Object b) { return a == null ? 
b == null : a.equals(b); } - /** {@inheritDoc} */ + @Override public boolean equals(Object obj) { if (obj == this) { return true; @@ -168,11 +169,12 @@ && isEqual(this.renewer, that.renewer) return false; } - /** {@inheritDoc} */ + @Override public int hashCode() { return this.sequenceNumber; } + @Override public void readFields(DataInput in) throws IOException { byte version = in.readByte(); if (version != VERSION) { @@ -200,6 +202,7 @@ void writeImpl(DataOutput out) throws IOException { WritableUtils.writeVInt(out, masterKeyId); } + @Override public void write(DataOutput out) throws IOException { if (owner.getLength() > Text.DEFAULT_MAX_LEN) { throw new IOException("owner is too long to be serialized!"); @@ -213,6 +216,7 @@ public void write(DataOutput out) throws IOException { writeImpl(out); } + @Override public String toString() { StringBuilder buffer = new StringBuilder(); buffer diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index 97530d10d0..29367a38ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -404,6 +404,7 @@ private class ExpiredTokenRemover extends Thread { private long lastMasterKeyUpdate; private long lastTokenCacheCleanup; + @Override public void run() { LOG.info("Starting expired delegation token remover thread, " + "tokenRemoverScanInterval=" + tokenRemoverScanInterval diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java index 3b5705eb6d..3458b2df82 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationKey.java @@ -91,6 +91,7 @@ public void setExpiryDate(long expiryDate) { /** */ + @Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, keyId); WritableUtils.writeVLong(out, expiryDate); @@ -104,6 +105,7 @@ public void write(DataOutput out) throws IOException { /** */ + @Override public void readFields(DataInput in) throws IOException { keyId = WritableUtils.readVInt(in); expiryDate = WritableUtils.readVLong(in); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java index 0f5bf7a513..c0c107933f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetUserMappingsProtocol.java @@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.ipc.VersionedProtocol; /** * Protocol implemented by the Name Node and Job Tracker which maps users to diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index 4711ed2f56..a1e20d242d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -71,6 +71,7 @@ public class AsyncDiskService { public AsyncDiskService(String[] volumes) throws IOException { threadFactory = new ThreadFactory() { + @Override public Thread newThread(Runnable r) { return new Thread(threadGroup, r); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 4813847e84..7d321e8a29 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -240,19 +240,23 @@ static public int getChecksumHeaderSize() { return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int } //Checksum Interface. Just a wrapper around member summer. + @Override public long getValue() { return summer.getValue(); } + @Override public void reset() { summer.reset(); inSum = 0; } + @Override public void update( byte[] b, int off, int len ) { if ( len > 0 ) { summer.update( b, off, len ); inSum += len; } } + @Override public void update( int b ) { summer.update( b ); inSum += 1; @@ -444,9 +448,13 @@ static class ChecksumNull implements Checksum { public ChecksumNull() {} //Dummy interface + @Override public long getValue() { return 0; } + @Override public void reset() {} + @Override public void update(byte[] b, int off, int len) {} + @Override public void update(int b) {} }; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java index b9d2fc17ca..5a0fb27fe4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HeapSort.java @@ -48,13 +48,12 @@ private static void downHeap(final IndexedSortable s, final int b, * Sort the given range of items using heap sort. 
* {@inheritDoc} */ + @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } - /** - * {@inheritDoc} - */ + @Override public void sort(final IndexedSortable s, final int p, final int r, final Progressable rep) { final int N = r - p; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java index 536b6f27ab..9064357747 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java @@ -214,6 +214,7 @@ public synchronized void setStatus(String status) { this.status = status; } + @Override public String toString() { StringBuilder result = new StringBuilder(); toString(result); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java index a7a2d37c84..3dd30fe6b0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java @@ -46,17 +46,17 @@ public PureJavaCrc32() { reset(); } - /** {@inheritDoc} */ + @Override public long getValue() { return (~crc) & 0xffffffffL; } - /** {@inheritDoc} */ + @Override public void reset() { crc = 0xffffffff; } - /** {@inheritDoc} */ + @Override public void update(byte[] b, int off, int len) { int localCrc = crc; while(len > 7) { @@ -81,7 +81,7 @@ public void update(byte[] b, int off, int len) { crc = localCrc; } - /** {@inheritDoc} */ + @Override final public void update(int b) { crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff]; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java index 3d52eae077..7fdfe1489f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java @@ -42,18 +42,18 @@ public PureJavaCrc32C() { reset(); } - /** {@inheritDoc} */ + @Override public long getValue() { long ret = crc; return (~ret) & 0xffffffffL; } - /** {@inheritDoc} */ + @Override public void reset() { crc = 0xffffffff; } - /** {@inheritDoc} */ + @Override public void update(byte[] b, int off, int len) { int localCrc = crc; while(len > 7) { @@ -78,7 +78,7 @@ public void update(byte[] b, int off, int len) { crc = localCrc; } - /** {@inheritDoc} */ + @Override final public void update(int b) { crc = (crc >>> 8) ^ T8_0[(crc ^ b) & 0xff]; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java index 5686f82d05..73d8d90d42 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/QuickSort.java @@ -52,13 +52,12 @@ protected static int getMaxDepth(int x) { * {@inheritDoc} If the recursion depth falls below {@link #getMaxDepth}, * then switch to {@link HeapSort}. 
*/ + @Override public void sort(IndexedSortable s, int p, int r) { sort(s, p, r, null); } - /** - * {@inheritDoc} - */ + @Override public void sort(final IndexedSortable s, int p, int r, final Progressable rep) { sortInternal(s, p, r, rep, getMaxDepth(r - p)); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java index 4520cb264a..bf12de633f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java @@ -257,6 +257,7 @@ void moveData() { */ private static ThreadLocal cloneBuffers = new ThreadLocal() { + @Override protected synchronized CopyInCopyOutBuffer initialValue() { return new CopyInCopyOutBuffer(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java index d563c1d7d5..b8c16f214d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java @@ -30,7 +30,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; /** * A base class for running a Unix command. @@ -323,10 +322,12 @@ public void execute() throws IOException { this.run(); } + @Override public String[] getExecString() { return command; } + @Override protected void parseExecResult(BufferedReader lines) throws IOException { output = new StringBuffer(); char[] buf = new char[512]; @@ -348,6 +349,7 @@ public String getOutput() { * * @return a string representation of the object. 
*/ + @Override public String toString() { StringBuilder builder = new StringBuilder(); String[] args = getExecString(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java index e39463fc84..f183a4c53c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Filter.java @@ -193,6 +193,7 @@ public void add(Key[] keys){ // Writable interface + @Override public void write(DataOutput out) throws IOException { out.writeInt(VERSION); out.writeInt(this.nbHash); @@ -200,6 +201,7 @@ public void write(DataOutput out) throws IOException { out.writeInt(this.vectorSize); } + @Override public void readFields(DataInput in) throws IOException { int ver = in.readInt(); if (ver > 0) { // old unversioned format diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java index 1ff5b82600..7ac134c76c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/Key.java @@ -154,12 +154,14 @@ public int hashCode() { // Writable + @Override public void write(DataOutput out) throws IOException { out.writeInt(bytes.length); out.write(bytes); out.writeDouble(weight); } + @Override public void readFields(DataInput in) throws IOException { this.bytes = new byte[in.readInt()]; in.readFully(this.bytes); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java index 8e867c4cbb..bf4891378c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java @@ -81,6 +81,7 @@ private static long rot(long val, int pos) { *
Use for hash table lookup, or anything where one collision in 2^^32 is * acceptable. Do NOT use for cryptographic purposes. */ + @Override @SuppressWarnings("fallthrough") public int hash(byte[] key, int nbytes, int initval) { int length = nbytes; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java index 8e79f1aa5a..6ed3dfd3df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/MurmurHash.java @@ -37,6 +37,7 @@ public static Hash getInstance() { return _instance; } + @Override public int hash(byte[] data, int length, int seed) { int m = 0x5bd1e995; int r = 24; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java index 202b2429cb..50cb3a53c5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java @@ -24,5 +24,6 @@ public interface CLICommand { public CommandExecutor getExecutor(String tag) throws IllegalArgumentException; public CLICommandTypes getType(); public String getCmd(); + @Override public String toString(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java index 55e99b51a6..602a07f3d5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java @@ -31,6 +31,7 @@ public CLITestCmd(String str, CLICommandTypes type) { this.type = type; } + @Override public CommandExecutor getExecutor(String tag) throws IllegalArgumentException { if (getType() instanceof CLICommandFS) return new FSCmdExecutor(tag, new FsShell()); @@ -38,12 +39,17 @@ public CommandExecutor getExecutor(String tag) throws IllegalArgumentException { IllegalArgumentException("Unknown type of test command: " + getType()); } + @Override public CLICommandTypes getType() { return type; } + + @Override public String getCmd() { return cmd; } + + @Override public String toString() { return cmd; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java index 86e86b6e1f..98237ac726 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/FSCmdExecutor.java @@ -29,6 +29,7 @@ public FSCmdExecutor(String namenode, FsShell shell) { this.shell = shell; } + @Override protected void execute(final String cmd) throws Exception{ String[] args = getCommandAsArgs(cmd, "NAMENODE", this.namenode); ToolRunner.run(shell, args); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java index 1928de44a4..1c22ee68c7 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfServlet.java @@ -24,7 +24,6 @@ import javax.xml.parsers.DocumentBuilderFactory; import org.mortbay.util.ajax.JSON; -import org.mortbay.util.ajax.JSON.Output; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java index 679ced34ee..27842be427 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java @@ -39,8 +39,6 @@ import junit.framework.TestCase; import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertNotNull; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.Path; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java index df346dd657..014844e28b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationDeprecation.java @@ -19,8 +19,6 @@ package org.apache.hadoop.conf; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java index b8f820c024..3036d0c839 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestDeprecatedKeys.java @@ -18,9 +18,6 @@ package org.apache.hadoop.conf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.io.ByteArrayOutputStream; import java.util.Map; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java index 2cfb56a416..f4367523cb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestReconfiguration.java @@ -99,17 +99,11 @@ public ReconfigurableDummy(Configuration conf) { super(conf); } - /** - * {@inheritDoc} - */ @Override public Collection getReconfigurableProperties() { return Arrays.asList(PROP1, PROP2, PROP4); } - /** - * {@inheritDoc} - */ @Override public synchronized void reconfigurePropertyImpl(String property, String newVal) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java index 6c50100901..e9677badc3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java @@ -67,6 +67,7 @@ public abstract class FSMainOperationsBaseTest { protected static FileSystem fSys; final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(final Path file) { return true; } @@ -74,6 +75,7 @@ public boolean accept(final Path file) { //A test filter with returns any path containing a "b" final private static PathFilter TEST_X_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { if(file.getName().contains("x") || file.getName().contains("X")) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java index 150b68e35d..bf60e02cd6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java @@ -67,6 +67,7 @@ public abstract class FileContextMainOperationsBaseTest { protected static FileContext fc; final private static PathFilter DEFAULT_FILTER = new PathFilter() { + @Override public boolean accept(final Path file) { return true; } @@ -74,6 +75,7 @@ public boolean accept(final Path file) { //A test filter with returns any path containing a "b" final private static PathFilter TEST_X_FILTER = new PathFilter() { + @Override public boolean accept(Path file) { if(file.getName().contains("x") || file.getName().contains("X")) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java index 39ae24659b..b80764cebf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java @@ -176,6 +176,7 @@ public void testUgi() throws IOException, InterruptedException { .createRemoteUser("otherUser"); FileContext newFc = otherUser.doAs(new PrivilegedExceptionAction() { + @Override public FileContext run() throws Exception { FileContext newFc = FileContext.getFileContext(); return newFc; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java index 5786a6653c..0acd416dd8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java @@ -20,8 +20,6 @@ import java.io.*; import java.util.ArrayList; -import java.util.Iterator; - import junit.framework.Assert; import org.apache.hadoop.fs.permission.FsPermission; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java index 
5d2c595f5c..3e5970d228 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java @@ -19,7 +19,6 @@ package org.apache.hadoop.fs; import java.io.BufferedWriter; -import java.io.IOException; import java.io.OutputStreamWriter; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index ffb1dcf1f1..de3d5566eb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -29,11 +29,13 @@ public class TestDU extends TestCase { final static private File DU_DIR = new File( System.getProperty("test.build.data","/tmp"), "dutmp"); + @Override public void setUp() { FileUtil.fullyDelete(DU_DIR); assertTrue(DU_DIR.mkdirs()); } + @Override public void tearDown() throws IOException { FileUtil.fullyDelete(DU_DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java index 128c1fb088..e0ee5f03f2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java @@ -28,6 +28,7 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest { + @Override @Before public void setUp() throws Exception { fSys = FileSystem.getLocal(new Configuration()); @@ -35,12 +36,14 @@ public void setUp() throws Exception { } static Path wd = null; + @Override protected Path getDefaultWorkingDirectory() throws IOException { if (wd == null) wd = FileSystem.getLocal(new Configuration()).getWorkingDirectory(); return wd; } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java index 335f403fe7..439ce2c15c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsPermission.java @@ -26,12 +26,14 @@ public class TestFcLocalFsPermission extends FileContextPermissionBase { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java index bc1126f231..29b6463806 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFcLocalFsUtil.java @@ -25,6 +25,7 @@ public class TestFcLocalFsUtil extends FileContextUtilBase { + @Override @Before public void setUp() throws Exception { fc = 
FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java index 7e5f99f5fb..8dff124d7e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCaching.java @@ -110,6 +110,7 @@ public void testDefaultFsUris() throws Exception { public static class InitializeForeverFileSystem extends LocalFileSystem { final static Semaphore sem = new Semaphore(0); + @Override public void initialize(URI uri, Configuration conf) throws IOException { // notify that InitializeForeverFileSystem started initialization sem.release(); @@ -127,6 +128,7 @@ public void initialize(URI uri, Configuration conf) throws IOException { public void testCacheEnabledWithInitializeForeverFS() throws Exception { final Configuration conf = new Configuration(); Thread t = new Thread() { + @Override public void run() { conf.set("fs.localfs1.impl", "org.apache.hadoop.fs." + "TestFileSystemCaching$InitializeForeverFileSystem"); @@ -167,11 +169,13 @@ public void testCacheForUgi() throws Exception { UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo"); UserGroupInformation ugiB = UserGroupInformation.createRemoteUser("bar"); FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } }); FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -180,6 +184,7 @@ public FileSystem run() throws Exception { assertSame(fsA, fsA1); FileSystem fsB = ugiB.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -192,6 +197,7 @@ public FileSystem run() throws Exception { UserGroupInformation ugiA2 = UserGroupInformation.createRemoteUser("foo"); fsA = ugiA2.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -203,6 +209,7 @@ public FileSystem run() throws Exception { ugiA.addToken(t1); fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -245,12 +252,14 @@ public void testCloseAllForUGI() throws Exception { conf.set("fs.cachedfile.impl", FileSystem.getFileSystemClass("file", null).getName()); UserGroupInformation ugiA = UserGroupInformation.createRemoteUser("foo"); FileSystem fsA = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } }); //Now we should get the cached filesystem FileSystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } @@ -261,6 +270,7 @@ public FileSystem run() throws Exception { //Now we should get a different (newly created) filesystem fsA1 = ugiA.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return FileSystem.get(new URI("cachedfile://a"), conf); } diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java index c66b4fa901..574ed704da 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java @@ -19,8 +19,6 @@ import static org.junit.Assert.*; -import java.io.IOException; - import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.util.DataChecksum; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index dcb5871761..c6812a1930 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -411,6 +411,7 @@ public FileStatus getFileStatus(Path p) throws IOException { } static class MyFsShell extends FsShell { + @Override protected void registerCommands(CommandFactory factory) { factory.addClass(InterruptCommand.class, "-testInterrupt"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index 6f3c270232..aae013fd77 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -18,7 +18,6 @@ package org.apache.hadoop.fs; import java.io.IOException; -import java.util.Iterator; import java.util.HashSet; import java.util.Random; import java.util.Set; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java index e3402abee9..f5decbb2b0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextCreateMkdir.java @@ -23,6 +23,7 @@ public class TestLocalFSFileContextCreateMkdir extends FileContextCreateMkdirBaseTest { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java index 901b6c96ea..d1c272cc85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java @@ -27,6 +27,7 @@ public class TestLocalFSFileContextMainOperations extends FileContextMainOperationsBaseTest { + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); @@ -34,6 +35,7 @@ public void setUp() throws Exception { } static Path wd = null; + @Override protected Path getDefaultWorkingDirectory() throws IOException { if (wd == null) wd = FileSystem.getLocal(new 
Configuration()).getWorkingDirectory(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java index 89684fe720..64d0525a18 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java @@ -35,18 +35,22 @@ */ public class TestLocalFSFileContextSymlink extends FileContextSymlinkBaseTest { + @Override protected String getScheme() { return "file"; } + @Override protected String testBaseDir1() throws IOException { return getAbsoluteTestRootDir(fc)+"/test1"; } + @Override protected String testBaseDir2() throws IOException { return getAbsoluteTestRootDir(fc)+"/test2"; } + @Override protected URI testURI() { try { return new URI("file:///"); @@ -55,6 +59,7 @@ protected URI testURI() { } } + @Override @Before public void setUp() throws Exception { fc = FileContext.getLocalFSFileContext(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java index 35c23cb0f3..45e9bfb79c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java @@ -47,15 +47,18 @@ public void tearDown() throws Exception { fc.delete(getTestRootPath(fc, "test"), true); } + @Override protected void verifyReadBytes(Statistics stats) { Assert.assertEquals(blockSize, stats.getBytesRead()); } + @Override protected void verifyWrittenBytes(Statistics stats) { //Extra 12 bytes are written apart from the block. 
Assert.assertEquals(blockSize + 12, stats.getBytesWritten()); } + @Override protected URI getFsUri() { return URI.create(LOCAL_FS_ROOT_URI); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java index 512567a8d5..6c417cdb7c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocal_S3FileContextURI.java @@ -24,6 +24,7 @@ public class TestLocal_S3FileContextURI extends FileContextURIBase { + @Override @Before public void setUp() throws Exception { Configuration S3Conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java index c6324f8dc9..22fa5b0629 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestS3_LocalFileContextURI.java @@ -24,6 +24,7 @@ public class TestS3_LocalFileContextURI extends FileContextURIBase { + @Override @Before public void setUp() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index 8bfa7185b0..70bd62fa00 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -67,6 +67,7 @@ protected static int countSameDeletedFiles(FileSystem fs, // filter that matches all the files that start with fileName* PathFilter pf = new PathFilter() { + @Override public boolean accept(Path file) { return file.getName().startsWith(prefix); } @@ -563,6 +564,7 @@ static class TestLFS extends LocalFileSystem { super(); this.home = home; } + @Override public Path getHomeDirectory() { return home; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java index b8b6957266..baf25ded69 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/KFSEmulationImpl.java @@ -39,16 +39,20 @@ public KFSEmulationImpl(Configuration conf) throws IOException { localFS = FileSystem.getLocal(conf); } + @Override public boolean exists(String path) throws IOException { return localFS.exists(new Path(path)); } + @Override public boolean isDirectory(String path) throws IOException { return localFS.isDirectory(new Path(path)); } + @Override public boolean isFile(String path) throws IOException { return localFS.isFile(new Path(path)); } + @Override public String[] readdir(String path) throws IOException { FileStatus[] p = localFS.listStatus(new Path(path)); try { @@ -64,10 +68,12 @@ public String[] readdir(String path) throws IOException { return entries; } + @Override public FileStatus[] readdirplus(Path path) throws IOException { return localFS.listStatus(path); } + @Override public int mkdirs(String path) throws 
IOException { if (localFS.mkdirs(new Path(path))) return 0; @@ -75,12 +81,14 @@ public int mkdirs(String path) throws IOException { return -1; } + @Override public int rename(String source, String dest) throws IOException { if (localFS.rename(new Path(source), new Path(dest))) return 0; return -1; } + @Override public int rmdir(String path) throws IOException { if (isDirectory(path)) { // the directory better be empty @@ -91,21 +99,26 @@ public int rmdir(String path) throws IOException { return -1; } + @Override public int remove(String path) throws IOException { if (isFile(path) && (localFS.delete(new Path(path), true))) return 0; return -1; } + @Override public long filesize(String path) throws IOException { return localFS.getFileStatus(new Path(path)).getLen(); } + @Override public short getReplication(String path) throws IOException { return 1; } + @Override public short setReplication(String path, short replication) throws IOException { return 1; } + @Override public String[][] getDataLocation(String path, long start, long len) throws IOException { BlockLocation[] blkLocations = localFS.getFileBlockLocations(localFS.getFileStatus(new Path(path)), @@ -123,6 +136,7 @@ public String[][] getDataLocation(String path, long start, long len) throws IOEx return hints; } + @Override public long getModificationTime(String path) throws IOException { FileStatus s = localFS.getFileStatus(new Path(path)); if (s == null) @@ -131,18 +145,21 @@ public long getModificationTime(String path) throws IOException { return s.getModificationTime(); } + @Override public FSDataOutputStream append(String path, int bufferSize, Progressable progress) throws IOException { // besides path/overwrite, the other args don't matter for // testing purposes. return localFS.append(new Path(path)); } + @Override public FSDataOutputStream create(String path, short replication, int bufferSize, Progressable progress) throws IOException { // besides path/overwrite, the other args don't matter for // testing purposes. 
return localFS.create(new Path(path)); } + @Override public FSDataInputStream open(String path, int bufferSize) throws IOException { return localFS.open(new Path(path)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java index 3ff998f996..c1c676e9b0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/kfs/TestKosmosFileSystem.java @@ -18,21 +18,17 @@ package org.apache.hadoop.fs.kfs; -import java.io.*; -import java.net.*; +import java.io.IOException; +import java.net.URI; import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.kfs.KosmosFileSystem; - public class TestKosmosFileSystem extends TestCase { KosmosFileSystem kosmosFileSystem; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java index 5124211d34..3222cf43bb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/DataGenerator.java @@ -67,6 +67,7 @@ public class DataGenerator extends Configured implements Tool { * namespace. Afterwards it reads the file attributes and creates files * in the file. All file content is filled with 'a'. */ + @Override public int run(String[] args) throws Exception { int exitCode = 0; exitCode = init(args); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index ea192c4849..7490be80af 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -186,6 +186,7 @@ private DFSClientThread(int id) { /** Main loop * Each iteration decides what's the next operation and then pauses. */ + @Override public void run() { try { while (shouldRun) { @@ -281,6 +282,7 @@ private void list() throws IOException { * Before exiting, it prints the average execution for * each operation and operation throughput. 
*/ + @Override public int run(String[] args) throws Exception { int exitCode = init(args); if (exitCode != 0) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java index 689e01dbf3..71649a5941 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/StructureGenerator.java @@ -214,6 +214,7 @@ private FileINode(String name, double numOfBlocks) { } /** Output a file attribute */ + @Override protected void outputFiles(PrintStream out, String prefix) { prefix = (prefix == null)?super.name: prefix + "/"+super.name; out.println(prefix + " " + numOfBlocks); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java index 84d142e089..8024c6acc7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3/InMemoryFileSystemStore.java @@ -47,34 +47,42 @@ class InMemoryFileSystemStore implements FileSystemStore { private SortedMap inodes = new TreeMap(); private Map blocks = new HashMap(); + @Override public void initialize(URI uri, Configuration conf) { this.conf = conf; } + @Override public String getVersion() throws IOException { return "0"; } + @Override public void deleteINode(Path path) throws IOException { inodes.remove(normalize(path)); } + @Override public void deleteBlock(Block block) throws IOException { blocks.remove(block.getId()); } + @Override public boolean inodeExists(Path path) throws IOException { return inodes.containsKey(normalize(path)); } + @Override public boolean blockExists(long blockId) throws IOException { return blocks.containsKey(blockId); } + @Override public INode retrieveINode(Path path) throws IOException { return inodes.get(normalize(path)); } + @Override public File retrieveBlock(Block block, long byteRangeStart) throws IOException { byte[] data = blocks.get(block.getId()); File file = createTempFile(); @@ -100,6 +108,7 @@ private File createTempFile() throws IOException { return result; } + @Override public Set listSubPaths(Path path) throws IOException { Path normalizedPath = normalize(path); // This is inefficient but more than adequate for testing purposes. 
@@ -112,6 +121,7 @@ public Set listSubPaths(Path path) throws IOException { return subPaths; } + @Override public Set listDeepSubPaths(Path path) throws IOException { Path normalizedPath = normalize(path); String pathString = normalizedPath.toUri().getPath(); @@ -128,10 +138,12 @@ public Set listDeepSubPaths(Path path) throws IOException { return subPaths; } + @Override public void storeINode(Path path, INode inode) throws IOException { inodes.put(normalize(path), inode); } + @Override public void storeBlock(Block block, File file) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[8192]; @@ -157,11 +169,13 @@ private Path normalize(Path path) { return new Path(path.toUri().getPath()); } + @Override public void purge() throws IOException { inodes.clear(); blocks.clear(); } + @Override public void dump() throws IOException { StringBuilder sb = new StringBuilder(getClass().getSimpleName()); sb.append(", \n"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java index bc8ccc0f68..abac70ac1b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java @@ -55,15 +55,18 @@ class InMemoryNativeFileSystemStore implements NativeFileSystemStore { new TreeMap(); private SortedMap dataMap = new TreeMap(); + @Override public void initialize(URI uri, Configuration conf) throws IOException { this.conf = conf; } + @Override public void storeEmptyFile(String key) throws IOException { metadataMap.put(key, new FileMetadata(key, 0, Time.now())); dataMap.put(key, new byte[0]); } + @Override public void storeFile(String key, File file, byte[] md5Hash) throws IOException { @@ -86,10 +89,12 @@ public void storeFile(String key, File file, byte[] md5Hash) dataMap.put(key, out.toByteArray()); } + @Override public InputStream retrieve(String key) throws IOException { return retrieve(key, 0); } + @Override public InputStream retrieve(String key, long byteRangeStart) throws IOException { @@ -118,15 +123,18 @@ private File createTempFile() throws IOException { return result; } + @Override public FileMetadata retrieveMetadata(String key) throws IOException { return metadataMap.get(key); } + @Override public PartialListing list(String prefix, int maxListingLength) throws IOException { return list(prefix, maxListingLength, null, false); } + @Override public PartialListing list(String prefix, int maxListingLength, String priorLastKey, boolean recursive) throws IOException { @@ -165,16 +173,19 @@ private PartialListing list(String prefix, String delimiter, commonPrefixes.toArray(new String[0])); } + @Override public void delete(String key) throws IOException { metadataMap.remove(key); dataMap.remove(key); } + @Override public void copy(String srcKey, String dstKey) throws IOException { metadataMap.put(dstKey, metadataMap.get(srcKey)); dataMap.put(dstKey, dataMap.get(srcKey)); } + @Override public void purge(String prefix) throws IOException { Iterator> i = metadataMap.entrySet().iterator(); @@ -187,6 +198,7 @@ public void purge(String prefix) throws IOException { } } + @Override public void dump() throws IOException { System.out.println(metadataMap.values()); System.out.println(dataMap.keySet()); diff 
--git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java index 44d7a4a7c1..e990b92465 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java @@ -347,6 +347,7 @@ static class MockFileSystem extends FilterFileSystem { MockFileSystem() { super(mock(FileSystem.class)); } + @Override public void initialize(URI name, Configuration conf) throws IOException {} } } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java index 2f8d8ce848..de4b1e87ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java @@ -33,6 +33,7 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest { static FileSystem fcTarget; + @Override @Before public void setUp() throws Exception { Configuration conf = new Configuration(); @@ -42,6 +43,7 @@ public void setUp() throws Exception { super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java index 39e3515d03..16b38b72ec 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcCreateMkdirLocalFs.java @@ -28,12 +28,14 @@ public class TestFcCreateMkdirLocalFs extends FileContextCreateMkdirBaseTest { + @Override @Before public void setUp() throws Exception { fc = ViewFsTestSetup.setupForViewFsLocalFs(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java index 235a182616..5641c9d70b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java @@ -36,6 +36,7 @@ public class TestFcMainOperationsLocalFs extends FileContext fclocal; Path targetOfTests; + @Override @Before public void setUp() throws Exception { /** @@ -79,6 +80,7 @@ public void setUp() throws Exception { super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java index 3e92eb9cc1..0e44be9be8 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcPermissionsLocalFs.java @@ -27,12 +27,14 @@ public class TestFcPermissionsLocalFs extends FileContextPermissionBase { + @Override @Before public void setUp() throws Exception { fc = ViewFsTestSetup.setupForViewFsLocalFs(); super.setUp(); } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java index e3f6e404a1..735dfcf3cf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java @@ -160,6 +160,7 @@ Token[] addTokensWithCreds(FileSystem fs, Credentials creds) throws Exception static class FakeFileSystem extends RawLocalFileSystem { URI uri; + @Override public void initialize(URI name, Configuration conf) throws IOException { this.uri = name; } @@ -169,6 +170,7 @@ public Path getInitialWorkingDirectory() { return new Path("/"); // ctor calls getUri before the uri is inited... } + @Override public URI getUri() { return uri; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java index 8d4c38e1e6..4b45fc8c5b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java @@ -39,6 +39,7 @@ public class TestViewFileSystemLocalFileSystem extends ViewFileSystemBaseTest { + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -47,6 +48,7 @@ public void setUp() throws Exception { } + @Override @After public void tearDown() throws Exception { fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java index 3ba3e002e0..4786cd5fdf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemWithAuthorityLocalFileSystem.java @@ -42,6 +42,7 @@ public class TestViewFileSystemWithAuthorityLocalFileSystem extends ViewFileSystemBaseTest { URI schemeWithAuthority; + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -55,12 +56,14 @@ public void setUp() throws Exception { fsView = FileSystem.get(schemeWithAuthority, conf); } + @Override @After public void tearDown() throws Exception { fsTarget.delete(FileSystemTestHelper.getTestRootPath(fsTarget), true); super.tearDown(); } + @Override @Test public void 
testBasicPaths() { Assert.assertEquals(schemeWithAuthority, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java index 2a4488ce76..99bcf5d32b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsLocalFs.java @@ -26,6 +26,7 @@ public class TestViewFsLocalFs extends ViewFsBaseTest { + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -34,6 +35,7 @@ public void setUp() throws Exception { } + @Override @After public void tearDown() throws Exception { super.tearDown(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java index 81270c2320..4325f40346 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java @@ -30,7 +30,6 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; -import org.mortbay.log.Log; public class TestViewFsTrash { FileSystem fsTarget; // the target file system - the mount will point here @@ -46,6 +45,7 @@ static class TestLFS extends LocalFileSystem { super(); this.home = home; } + @Override public Path getHomeDirectory() { return home; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java index 217d3fcd94..2e498f2c0a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsWithAuthorityLocalFs.java @@ -41,6 +41,7 @@ public class TestViewFsWithAuthorityLocalFs extends ViewFsBaseTest { URI schemeWithAuthority; + @Override @Before public void setUp() throws Exception { // create the test root on local_fs @@ -54,11 +55,13 @@ public void setUp() throws Exception { fcView = FileContext.getFileContext(schemeWithAuthority, conf); } + @Override @After public void tearDown() throws Exception { super.tearDown(); } + @Override @Test public void testBasicPaths() { Assert.assertEquals(schemeWithAuthority, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java index 4a576d08eb..9eec749336 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java @@ -23,7 +23,6 @@ import java.net.URISyntaxException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java index 80612d9b78..9c68b282f6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java @@ -25,7 +25,6 @@ import java.io.OutputStream; import java.net.Socket; import java.util.ArrayList; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -82,6 +81,7 @@ public ClientBaseWithFixes() { * */ protected class NullWatcher implements Watcher { + @Override public void process(WatchedEvent event) { /* nada */ } } @@ -97,6 +97,7 @@ synchronized public void reset() { clientConnected = new CountDownLatch(1); connected = false; } + @Override synchronized public void process(WatchedEvent event) { if (event.getState() == KeeperState.SyncConnected || event.getState() == KeeperState.ConnectedReadOnly) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index c38bc53424..0985af18c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -184,6 +184,7 @@ public void close() throws IOException { } public static class DummyFencer implements FenceMethod { + @Override public void checkArgs(String args) throws BadFencingConfigurationException { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index d9b10ae091..eef6d7de41 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -46,14 +46,17 @@ public class TestGlobalFilter extends HttpServerFunctionalTest { static public class RecordingFilter implements Filter { private FilterConfig filterConfig = null; + @Override public void init(FilterConfig filterConfig) { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -69,6 +72,7 @@ public void doFilter(ServletRequest request, ServletResponse response, static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addGlobalFilter("recording", RecordingFilter.class.getName(), null); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index 73aebea486..3bd77f039c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -46,14 +46,17 @@ public class TestPathFilter extends HttpServerFunctionalTest { static public class RecordingFilter implements Filter { private FilterConfig filterConfig 
= null; + @Override public void init(FilterConfig filterConfig) { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -69,6 +72,7 @@ public void doFilter(ServletRequest request, ServletResponse response, static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("recording", RecordingFilter.class.getName(), null); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index 7bf608767e..a4d32531ce 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -45,14 +45,17 @@ public class TestServletFilter extends HttpServerFunctionalTest { static public class SimpleFilter implements Filter { private FilterConfig filterConfig = null; + @Override public void init(FilterConfig filterConfig) throws ServletException { this.filterConfig = filterConfig; } + @Override public void destroy() { this.filterConfig = null; } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (filterConfig == null) @@ -67,6 +70,7 @@ public void doFilter(ServletRequest request, ServletResponse response, static public class Initializer extends FilterInitializer { public Initializer() {} + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("simple", SimpleFilter.class.getName(), null); } @@ -149,6 +153,7 @@ static public class Initializer extends FilterInitializer { public Initializer() { } + @Override public void initFilter(FilterContainer container, Configuration conf) { container.addFilter("simple", ErrorFilter.class.getName(), null); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java index e3e885ad12..74e9cc86bd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java @@ -18,12 +18,10 @@ package org.apache.hadoop.io; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.lang.reflect.Type; import org.apache.avro.Schema; -import org.apache.avro.io.BinaryEncoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.reflect.ReflectData; import org.apache.avro.reflect.ReflectDatumWriter; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java index e97f2068be..8f99aab482 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java @@ -40,11 +40,13 @@ public int getLength() { return length; } + @Override public void write(DataOutput out) throws IOException { 
out.writeInt(length); out.write(data); } + @Override public void readFields(DataInput in) throws IOException { length = in.readInt(); if (data == null || length > data.length) @@ -102,6 +104,7 @@ public Comparator() { super(RandomDatum.class); } + @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { int n1 = readInt(b1, s1); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java index 2ca6c87f8e..077c0b065d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java @@ -19,17 +19,9 @@ package org.apache.hadoop.io; import java.io.IOException; -import java.io.ByteArrayOutputStream; import java.util.EnumSet; import java.lang.reflect.Type; -import org.apache.avro.Schema; -import org.apache.avro.reflect.ReflectData; -import org.apache.avro.reflect.ReflectDatumWriter; -import org.apache.avro.reflect.ReflectDatumReader; -import org.apache.avro.io.BinaryEncoder; -import org.apache.avro.io.DecoderFactory; - import junit.framework.TestCase; /** Unit test for EnumSetWritable */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java index 486d93d438..880bba0e8b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java @@ -48,9 +48,11 @@ protected void setUp() throws Exception { /** Dummy class for testing {@link GenericWritable} */ public static class Foo implements Writable { private String foo = "foo"; + @Override public void readFields(DataInput in) throws IOException { foo = Text.readString(in); } + @Override public void write(DataOutput out) throws IOException { Text.writeString(out, foo); } @@ -65,15 +67,19 @@ public boolean equals(Object obj) { public static class Bar implements Writable, Configurable { private int bar = 42; //The Answer to The Ultimate Question Of Life, the Universe and Everything private Configuration conf = null; + @Override public void readFields(DataInput in) throws IOException { bar = in.readInt(); } + @Override public void write(DataOutput out) throws IOException { out.writeInt(bar); } + @Override public Configuration getConf() { return conf; } + @Override public void setConf(Configuration conf) { this.conf = conf; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java index 86fefcf561..509d75e807 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java @@ -91,6 +91,7 @@ public void testMD5Hash() throws Exception { closeHash1.hashCode() != closeHash2.hashCode()); Thread t1 = new Thread() { + @Override public void run() { for (int i = 0; i < 100; i++) { MD5Hash hash = new MD5Hash(DFF); @@ -100,6 +101,7 @@ public void run() { }; Thread t2 = new Thread() { + @Override public void run() { for (int i = 0; i < 100; i++) { MD5Hash hash = new MD5Hash(D00); diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java index bae0ccd836..a48fb6770b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java @@ -21,17 +21,14 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.io.nativeio.NativeIO; import org.junit.BeforeClass; -import org.junit.Before; import org.junit.Test; import static org.junit.Assume.*; import static org.junit.Assert.*; import java.io.IOException; import java.io.File; -import java.io.FileInputStream; import java.io.FileOutputStream; public class TestSecureIOUtils { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java index fe33fefd91..1517c062b7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java @@ -481,6 +481,7 @@ private TestFSDataInputStream(InputStream in) throws IOException { super(in); } + @Override public void close() throws IOException { closed = true; super.close(); @@ -505,6 +506,7 @@ public void testCloseForErroneousSequenceFile() try { new SequenceFile.Reader(fs, path, conf) { // this method is called by the SequenceFile.Reader constructor, overwritten, so we can access the opened file + @Override protected FSDataInputStream openFile(FileSystem fs, Path file, int bufferSize, long length) throws IOException { final InputStream in = super.openFile(fs, file, bufferSize, length); openedFile[0] = new TestFSDataInputStream(in); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java index 21da8c0dce..df9fb54032 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java @@ -284,6 +284,7 @@ public ConcurrentEncodeDecodeThread(String name) { super(name); } + @Override public void run() { String name = this.getName(); DataOutputBuffer out = new DataOutputBuffer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java index df48f3cace..f7d45b9da7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java @@ -37,16 +37,19 @@ public static class SimpleVersionedWritable extends VersionedWritable { private static byte VERSION = 1; + @Override public byte getVersion() { return VERSION; } + @Override public void write(DataOutput out) throws IOException { super.write(out); // version. 
out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); // version this.state = in.readInt(); @@ -61,6 +64,7 @@ public static SimpleVersionedWritable read(DataInput in) throws IOException { /** Required by test code, below. */ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleVersionedWritable)) return false; @@ -85,6 +89,7 @@ public static class AdvancedVersionedWritable extends SimpleVersionedWritable { SimpleVersionedWritable containedObject = new SimpleVersionedWritable(); String[] testStringArray = {"The", "Quick", "Brown", "Fox", "Jumped", "Over", "The", "Lazy", "Dog"}; + @Override public void write(DataOutput out) throws IOException { super.write(out); out.writeUTF(shortTestString); @@ -97,6 +102,7 @@ public void write(DataOutput out) throws IOException { } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); shortTestString = in.readUTF(); @@ -108,6 +114,7 @@ public void readFields(DataInput in) throws IOException { + @Override public boolean equals(Object o) { super.equals(o); @@ -134,6 +141,7 @@ public boolean equals(Object o) { /* This one checks that version mismatch is thrown... */ public static class SimpleVersionedWritableV2 extends SimpleVersionedWritable { static byte VERSION = 2; + @Override public byte getVersion() { return VERSION; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java index 31c237f872..971e237d50 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java @@ -38,10 +38,12 @@ public static class SimpleWritable implements Writable { int state = RANDOM.nextInt(); + @Override public void write(DataOutput out) throws IOException { out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { this.state = in.readInt(); } @@ -53,6 +55,7 @@ public static SimpleWritable read(DataInput in) throws IOException { } /** Required by test code, below. */ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleWritable)) return false; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java index 7cb069ab00..396079c394 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java @@ -39,10 +39,12 @@ public static class SimpleWritable implements Writable { int state = RANDOM.nextInt(); + @Override public void write(DataOutput out) throws IOException { out.writeInt(state); } + @Override public void readFields(DataInput in) throws IOException { this.state = in.readInt(); } @@ -54,6 +56,7 @@ public static SimpleWritable read(DataInput in) throws IOException { } /** Required by test code, below. 
*/ + @Override public boolean equals(Object o) { if (!(o instanceof SimpleWritable)) return false; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java index 2caef859e6..280f1a8785 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java @@ -40,68 +40,81 @@ public Configuration getConf() { return conf; } + @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return null; } + @Override public Class getCompressorType() { return null; } + @Override public Compressor createCompressor() { return null; } + @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { return null; } + @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return null; } + @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { return null; } + @Override public Class getDecompressorType() { return null; } + @Override public Decompressor createDecompressor() { return null; } + @Override public String getDefaultExtension() { return ".base"; } } private static class BarCodec extends BaseCodec { + @Override public String getDefaultExtension() { return "bar"; } } private static class FooBarCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".foo.bar"; } } private static class FooCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".foo"; } } private static class NewGzipCodec extends BaseCodec { + @Override public String getDefaultExtension() { return ".gz"; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java index 1584895407..c25c4dc427 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/NanoTimer.java @@ -93,6 +93,7 @@ public boolean isStarted() { * * Note: If timer is never started, "ERR" will be returned. 
*/ + @Override public String toString() { if (!readable()) { return "ERR"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java index 6242ea6b37..2682634516 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java @@ -35,7 +35,6 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.io.file.tfile.TFile.Reader.Location; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; -import org.apache.hadoop.util.NativeCodeLoader; import org.junit.After; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java index bd56d44965..7a2c2fc9c4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java @@ -19,9 +19,6 @@ import java.io.IOException; -import org.apache.hadoop.io.RawComparator; -import org.apache.hadoop.io.WritableComparator; - /** * * Byte arrays test case class using GZ compression codec, base class of none diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java index 4e507259c0..3502198898 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java @@ -148,12 +148,14 @@ public TFileAppendable(FileSystem fs, Path path, String compress, this.writer = new TFile.Writer(fsdos, minBlkSize, compress, null, conf); } + @Override public void append(BytesWritable key, BytesWritable value) throws IOException { writer.append(key.get(), 0, key.getSize(), value.get(), 0, value .getSize()); } + @Override public void close() throws IOException { writer.close(); fsdos.close(); @@ -196,22 +198,27 @@ private void checkValueBuffer(int size) { - valueBuffer.length)]; } + @Override public byte[] getKey() { return keyBuffer; } + @Override public int getKeyLength() { return keyLength; } + @Override public byte[] getValue() { return valueBuffer; } + @Override public int getValueLength() { return valueLength; } + @Override public boolean next() throws IOException { if (scanner.atEnd()) return false; Entry entry = scanner.entry(); @@ -225,6 +232,7 @@ public boolean next() throws IOException { return true; } + @Override public void close() throws IOException { scanner.close(); reader.close(); @@ -266,11 +274,13 @@ else if (!"none".equals(compress)) } } + @Override public void append(BytesWritable key, BytesWritable value) throws IOException { writer.append(key, value); } + @Override public void close() throws IOException { writer.close(); fsdos.close(); @@ -291,26 +301,32 @@ public 
SeqFileReadable(FileSystem fs, Path path, int osBufferSize) value = new BytesWritable(); } + @Override public byte[] getKey() { return key.get(); } + @Override public int getKeyLength() { return key.getSize(); } + @Override public byte[] getValue() { return value.get(); } + @Override public int getValueLength() { return value.getSize(); } + @Override public boolean next() throws IOException { return reader.next(key, value); } + @Override public void close() throws IOException { reader.close(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index b9d4ec5690..acd728b0ec 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -88,6 +88,7 @@ public void testMultiThreadedFstat() throws Exception { List statters = new ArrayList(); for (int i = 0; i < 10; i++) { Thread statter = new Thread() { + @Override public void run() { long et = Time.now() + 5000; while (Time.now() < et) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java index 4949ef3140..77c9e30eed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java @@ -252,6 +252,7 @@ public ConcurrentMethodThread(UnreliableInterface unreliable) { this.unreliable = unreliable; } + @Override public void run() { try { result = unreliable.failsIfIdentifierDoesntMatch("impl2"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java index 54fe677844..5b77698b10 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java @@ -19,7 +19,6 @@ import java.io.IOException; -import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.StandbyException; @@ -60,24 +59,29 @@ public UnreliableImplementation(String identifier, this.exceptionToFailWith = exceptionToFailWith; } + @Override public void alwaysSucceeds() { // do nothing } + @Override public void alwaysFailsWithFatalException() throws FatalException { throw new FatalException(); } + @Override public void alwaysFailsWithRemoteFatalException() throws RemoteException { throw new RemoteException(FatalException.class.getName(), "Oops"); } + @Override public void failsOnceThenSucceeds() throws UnreliableException { if (failsOnceInvocationCount++ == 0) { throw new UnreliableException(); } } + @Override public boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException { if (failsOnceWithValueInvocationCount++ == 0) { throw new UnreliableException(); @@ -85,6 +89,7 @@ public boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException return true; } + @Override public void failsTenTimesThenSucceeds() throws 
UnreliableException { if (failsTenTimesInvocationCount++ < 10) { throw new UnreliableException(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java index 275a0dc1e2..4548c869f9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/Record.java @@ -21,10 +21,12 @@ public class Record { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java index 181419c137..1926ec55e5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java @@ -70,10 +70,12 @@ public void testReflect() throws Exception { public static class InnerRecord { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; @@ -91,10 +93,12 @@ public boolean equals(Object obj) { public static class RefSerializable implements AvroReflectSerializable { public int x = 7; + @Override public int hashCode() { return x; } + @Override public boolean equals(Object obj) { if (this == obj) return true; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java index ace6173faa..a82419d5dd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java @@ -208,6 +208,7 @@ void connectToServerAndGetDelegationToken( try { client = proxyUserUgi.doAs(new PrivilegedExceptionAction() { + @Override public MiniProtocol run() throws IOException { MiniProtocol p = (MiniProtocol) RPC.getProxy(MiniProtocol.class, MiniProtocol.versionID, addr, conf); @@ -235,6 +236,7 @@ long connectToServerUsingDelegationToken( long start = Time.now(); try { client = currentUgi.doAs(new PrivilegedExceptionAction() { + @Override public MiniProtocol run() throws IOException { return (MiniProtocol) RPC.getProxy(MiniProtocol.class, MiniProtocol.versionID, addr, conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index c7bc6411de..a0d6de0e9a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -130,6 +130,7 @@ public SerialCaller(Client client, InetSocketAddress server, int count) { this.count = count; } + @Override public void run() { for (int i = 0; i < count; i++) { try { @@ -219,6 +220,7 @@ static void maybeThrowRTE() { private static class IOEOnReadWritable extends LongWritable { public IOEOnReadWritable() {} + 
@Override public void readFields(DataInput in) throws IOException { super.readFields(in); maybeThrowIOE(); @@ -229,6 +231,7 @@ public void readFields(DataInput in) throws IOException { private static class RTEOnReadWritable extends LongWritable { public RTEOnReadWritable() {} + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); maybeThrowRTE(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java index e2e32c75ba..bf9fbc26d8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java @@ -106,17 +106,21 @@ DescriptorProtos.EnumDescriptorProto exchangeProto( public static class TestImpl implements TestProtocol { int fastPingCounter = 0; + @Override public long getProtocolVersion(String protocol, long clientVersion) { return TestProtocol.versionID; } + @Override public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int hashcode) { return new ProtocolSignature(TestProtocol.versionID, null); } + @Override public void ping() {} + @Override public synchronized void slowPing(boolean shouldSlow) { if (shouldSlow) { while (fastPingCounter < 2) { @@ -131,17 +135,22 @@ public synchronized void slowPing(boolean shouldSlow) { } } + @Override public String echo(String value) throws IOException { return value; } + @Override public String[] echo(String[] values) throws IOException { return values; } + @Override public Writable echo(Writable writable) { return writable; } + @Override public int add(int v1, int v2) { return v1 + v2; } + @Override public int add(int[] values) { int sum = 0; for (int i = 0; i < values.length; i++) { @@ -150,16 +159,19 @@ public int add(int[] values) { return sum; } + @Override public int error() throws IOException { throw new IOException("bobo"); } + @Override public void testServerGet() throws IOException { if (!(Server.get() instanceof RPC.Server)) { throw new IOException("Server.get() failed"); } } + @Override public int[] exchange(int[] values) { for (int i = 0; i < values.length; i++) { values[i] = i; @@ -186,6 +198,7 @@ static class Transactions implements Runnable { } // do two RPC that transfers data. 
+ @Override public void run() { int[] indata = new int[datasize]; int[] outdata = null; @@ -220,6 +233,7 @@ boolean isDone() { return done; } + @Override public void run() { try { proxy.slowPing(true); // this would hang until two fast pings happened diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index 50ae210ea9..e2b7707cd9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -284,6 +284,7 @@ public void testHashCode() throws Exception { "org.apache.hadoop.ipc.TestRPCCompatibility$TestProtocol1") public interface TestProtocol4 extends TestProtocol2 { public static final long versionID = 4L; + @Override int echo(int value) throws IOException; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 9246fd5d72..014875440e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -136,15 +136,18 @@ public void write(DataOutput out) throws IOException { public static class TestTokenSecretManager extends SecretManager { + @Override public byte[] createPassword(TestTokenIdentifier id) { return id.getBytes(); } + @Override public byte[] retrievePassword(TestTokenIdentifier id) throws InvalidToken { return id.getBytes(); } + @Override public TestTokenIdentifier createIdentifier() { return new TestTokenIdentifier(); } @@ -152,6 +155,7 @@ public TestTokenIdentifier createIdentifier() { public static class BadTokenSecretManager extends TestTokenSecretManager { + @Override public byte[] retrievePassword(TestTokenIdentifier id) throws InvalidToken { throw new InvalidToken(ERROR_MESSAGE); @@ -186,6 +190,7 @@ public interface TestSaslProtocol extends TestRPC.TestProtocol { public static class TestSaslImpl extends TestRPC.TestImpl implements TestSaslProtocol { + @Override public AuthenticationMethod getAuthMethod() throws IOException { return UserGroupInformation.getCurrentUser().getAuthenticationMethod(); } @@ -450,6 +455,7 @@ public void testDigestAuthMethod() throws Exception { current.addToken(token); current.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { TestSaslProtocol proxy = null; try { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java index 8d5cfc9a55..ec54f59686 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics/TestMetricsServlet.java @@ -43,6 +43,7 @@ public class TestMetricsServlet extends TestCase { * Initializes, for testing, two NoEmitMetricsContext's, and adds one value * to the first of them. 
*/ + @Override public void setUp() throws IOException { nc1 = new NoEmitMetricsContext(); nc1.init("test1", ContextFactory.getFactory()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java index 685fedc22c..5b75e33e31 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsAnnotations.java @@ -21,8 +21,6 @@ import org.junit.Test; import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.apache.hadoop.test.MockitoMaker.*; - import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; @@ -145,6 +143,7 @@ static class HybridMetrics implements MetricsSource { @Metric int getG0() { return 0; } + @Override public void getMetrics(MetricsCollector collector, boolean all) { collector.addRecord("foo") .setContext("foocontext") @@ -183,6 +182,7 @@ static class BadHybridMetrics implements MetricsSource { @Metric MutableCounterInt c1; + @Override public void getMetrics(MetricsCollector collector, boolean all) { collector.addRecord("foo"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java index 1969ccee54..47b496fa57 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMetricsRegistry.java @@ -51,6 +51,7 @@ public class TestMetricsRegistry { assertTrue("s1 found", r.get("s1") instanceof MutableStat); expectMetricsException("Metric name c1 already exists", new Runnable() { + @Override public void run() { r.newCounter("c1", "test dup", 0); } }); } @@ -70,10 +71,12 @@ public class TestMetricsRegistry { r.newGauge("g1", "test add", 1); expectMetricsException("Unsupported add", new Runnable() { + @Override public void run() { r.add("c1", 42); } }); expectMetricsException("Unsupported add", new Runnable() { + @Override public void run() { r.add("g1", 42); } }); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java index 379e9401d4..4204e2b624 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java @@ -21,10 +21,8 @@ import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; /** * Implements the {@link DNSToSwitchMapping} via static mappings. 
Used diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java index 2cd2271f43..aeb68ea1de 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java @@ -33,9 +33,11 @@ public FromCpp(String testName) { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java index 1cba75ed80..816d69ee26 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java @@ -23,8 +23,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.lang.reflect.Array; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Random; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java index 163ec1b00b..38eb9a0761 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java @@ -34,9 +34,11 @@ public TestRecordIO(String testName) { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java index 129ba2ced8..5977f03f85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java @@ -35,9 +35,11 @@ public TestRecordVersioning(String testName) { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java index d3c6385d74..7a3411e1ef 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/ToCpp.java @@ -33,9 +33,11 @@ public ToCpp(String testName) { super(testName); } + @Override protected void setUp() throws Exception { } + @Override protected void tearDown() throws Exception { } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java index 3c12047be2..d8138817e1 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java @@ -22,7 +22,6 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; -import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -55,6 +54,7 @@ public void testConfiguration() throws Exception { FilterContainer container = Mockito.mock(FilterContainer.class); Mockito.doAnswer( new Answer() { + @Override public Object answer(InvocationOnMock invocationOnMock) throws Throwable { Object[] args = invocationOnMock.getArguments(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java index d432623be0..72d02dbc6e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCredentials.java @@ -29,13 +29,10 @@ import java.security.Key; import java.security.NoSuchAlgorithmException; import java.util.HashMap; -import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.Collection; -import static org.mockito.Mockito.mock; - import javax.crypto.KeyGenerator; import org.apache.hadoop.io.Text; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index ee7bc29d1e..de35cd2460 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -39,7 +39,6 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; import org.junit.Test; -import org.apache.hadoop.ipc.TestSaslRPC; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier; import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSelector; @@ -113,6 +112,7 @@ public void testCreateProxyUser() throws Exception { PROXY_USER_NAME, realUserUgi); UserGroupInformation curUGI = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public UserGroupInformation run() throws IOException { return UserGroupInformation.getCurrentUser(); } @@ -131,10 +131,12 @@ public interface TestProtocol extends VersionedProtocol { public class TestImpl implements TestProtocol { + @Override public String aMethod() throws IOException { return UserGroupInformation.getCurrentUser().toString(); } + @Override public long getProtocolVersion(String protocol, long clientVersion) throws IOException { return TestProtocol.versionID; @@ -168,6 +170,7 @@ public void testRealUserSetup() throws IOException { PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -210,6 
+213,7 @@ public void testRealUserAuthorizationSuccess() throws IOException { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -257,6 +261,7 @@ public void testRealUserIPAuthorizationFailure() throws IOException { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -296,6 +301,7 @@ public void testRealUserIPNotSpecified() throws IOException { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -334,6 +340,7 @@ public void testRealUserGroupNotSpecified() throws IOException { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = (TestProtocol) RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); @@ -375,6 +382,7 @@ public void testRealUserGroupAuthorizationFailure() throws IOException { .createProxyUserForTesting(PROXY_USER_NAME, realUserUgi, GROUP_NAMES); String retVal = proxyUserUgi .doAs(new PrivilegedExceptionAction() { + @Override public String run() throws IOException { proxy = RPC.getProxy(TestProtocol.class, TestProtocol.versionID, addr, conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java index b284fe0c6a..48627276f8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java @@ -54,6 +54,7 @@ public static class FakeGroupMapping extends ShellBasedUnixGroupsMapping { private static Set allGroups = new HashSet(); private static Set blackList = new HashSet(); + @Override public List getGroups(String user) throws IOException { LOG.info("Getting groups for " + user); if (blackList.contains(user)) { @@ -62,6 +63,7 @@ public List getGroups(String user) throws IOException { return new LinkedList(allGroups); } + @Override public void cacheGroupsRefresh() throws IOException { LOG.info("Cache is being refreshed."); clearBlackList(); @@ -73,6 +75,7 @@ public static void clearBlackList() throws IOException { blackList.clear(); } + @Override public void cacheGroupsAdd(List groups) throws IOException { LOG.info("Adding " + groups + " to groups."); allGroups.addAll(groups); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java index e8b3a1c918..99c5c2a83f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestJNIGroupsMapping.java @@ -21,16 +21,11 @@ 
import java.util.Arrays; import java.util.List; -import java.util.SortedSet; -import java.util.TreeSet; - -import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.security.GroupMappingServiceProvider; import org.apache.hadoop.security.JniBasedUnixGroupsMapping; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; -import org.apache.hadoop.util.ReflectionUtils; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java index 4d8224b7cc..ce8ee28207 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java @@ -92,6 +92,7 @@ public void testLogin() throws Exception { UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); UserGroupInformation curUGI = userGroupInfo.doAs(new PrivilegedExceptionAction(){ + @Override public UserGroupInformation run() throws IOException { return UserGroupInformation.getCurrentUser(); }}); @@ -316,6 +317,7 @@ public void testUGITokens() throws Exception { // ensure that the tokens are passed through doAs Collection> otherSet = ugi.doAs(new PrivilegedExceptionAction>>(){ + @Override public Collection> run() throws IOException { return UserGroupInformation.getCurrentUser().getTokens(); } @@ -342,6 +344,7 @@ public void testTokenIdentifiers() throws Exception { // ensure that the token identifiers are passed through doAs Collection otherSet = ugi .doAs(new PrivilegedExceptionAction>() { + @Override public Collection run() throws IOException { return UserGroupInformation.getCurrentUser().getTokenIdentifiers(); } @@ -358,6 +361,7 @@ public void testUGIAuthMethod() throws Exception { ugi.setAuthenticationMethod(am); Assert.assertEquals(am, ugi.getAuthenticationMethod()); ugi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { Assert.assertEquals(am, UserGroupInformation.getCurrentUser() .getAuthenticationMethod()); @@ -379,6 +383,7 @@ public void testUGIAuthMethodInRealUser() throws Exception { Assert.assertEquals(am, UserGroupInformation .getRealAuthenticationMethod(proxyUgi)); proxyUgi.doAs(new PrivilegedExceptionAction() { + @Override public Object run() throws IOException { Assert.assertEquals(AuthenticationMethod.PROXY, UserGroupInformation .getCurrentUser().getAuthenticationMethod()); @@ -451,6 +456,7 @@ public static void verifyLoginMetrics(long success, int failure) public void testUGIUnderNonHadoopContext() throws Exception { Subject nonHadoopSubject = new Subject(); Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction() { + @Override public Void run() throws IOException { UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); assertNotNull(ugi); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java index 39ff6808fe..32f1fa1501 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java @@ -22,13 +22,10 @@ import java.util.List; import org.junit.Test; -import org.junit.Before; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java index 6d7d695663..1741eb7477 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestToken.java @@ -18,8 +18,6 @@ package org.apache.hadoop.security.token; -import static junit.framework.Assert.assertEquals; - import java.io.*; import java.util.Arrays; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java index c1dd00a4d7..85e227921f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java @@ -19,7 +19,6 @@ package org.apache.hadoop.security.token.delegation; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; @@ -47,7 +46,6 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation; import org.apache.hadoop.util.Daemon; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.junit.Test; @@ -73,9 +71,11 @@ public Text getKind() { return KIND; } + @Override public void write(DataOutput out) throws IOException { super.write(out); } + @Override public void readFields(DataInput in) throws IOException { super.readFields(in); } @@ -231,6 +231,7 @@ public void testDelegationTokenSecretManager() throws Exception { dtSecretManager, "SomeUser", "JobTracker"); // Fake renewer should not be able to renew shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "FakeRenewer"); return null; @@ -259,6 +260,7 @@ public Object run() throws Exception { Thread.sleep(2000); shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "JobTracker"); return null; @@ -280,6 +282,7 @@ public void testCancelDelegationToken() throws Exception { generateDelegationToken(dtSecretManager, "SomeUser", "JobTracker"); //Fake renewer should not be able to renew shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "FakeCanceller"); return null; @@ -287,6 +290,7 @@ public Object run() throws Exception { }, AccessControlException.class); 
dtSecretManager.cancelToken(token, "JobTracker"); shouldThrow(new PrivilegedExceptionAction() { + @Override public Object run() throws Exception { dtSecretManager.renewToken(token, "JobTracker"); return null; @@ -379,6 +383,7 @@ public void testParallelDelegationTokenCreation() throws Exception { final int numTokensPerThread = 100; class tokenIssuerThread implements Runnable { + @Override public void run() { for(int i =0;i T execute(String user, final Configuration conf, final FileSystemExec getAuthority()); UserGroupInformation ugi = getUGI(user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public T run() throws Exception { FileSystem fs = createFileSystem(conf); Instrumentation instrumentation = getServer().get(Instrumentation.class); @@ -362,6 +363,7 @@ public FileSystem createFileSystemInternal(String user, final Configuration conf new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).getAuthority()); UserGroupInformation ugi = getUGI(user); return ugi.doAs(new PrivilegedExceptionAction() { + @Override public FileSystem run() throws Exception { return createFileSystem(conf); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java index 9a36955d6a..ee4455c999 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/instrumentation/InstrumentationService.java @@ -85,16 +85,19 @@ public void init() throws ServiceException { all.put("samplers", (Map) samplers); jvmVariables.put("free.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().freeMemory(); } })); jvmVariables.put("max.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().maxMemory(); } })); jvmVariables.put("total.memory", new VariableHolder(new Instrumentation.Variable() { + @Override public Long getValue() { return Runtime.getRuntime().totalMemory(); } @@ -162,6 +165,7 @@ static class Cron implements Instrumentation.Cron { long own; long total; + @Override public Cron start() { if (total != 0) { throw new IllegalStateException("Cron already used"); @@ -175,6 +179,7 @@ public Cron start() { return this; } + @Override public Cron stop() { if (total != 0) { throw new IllegalStateException("Cron already used"); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java index f4e5bafece..2da7f24ec3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/scheduler/SchedulerService.java @@ -96,6 +96,7 @@ public void schedule(final Callable callable, long delay, long interval, Time LOG.debug("Scheduling callable [{}], interval [{}] seconds, delay [{}] in [{}]", new Object[]{callable, delay, interval, unit}); Runnable r = new Runnable() { + @Override public void run() { String instrName = callable.getClass().getSimpleName(); Instrumentation 
instr = getServer().get(Instrumentation.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java index c56f6e4968..b040054267 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java @@ -147,6 +147,7 @@ static String getDir(String name, String dirType, String defaultDir) { * * @param event servlet context event. */ + @Override public void contextInitialized(ServletContextEvent event) { try { init(); @@ -194,6 +195,7 @@ protected InetSocketAddress resolveAuthority() throws ServerException { * * @param event servlet context event. */ + @Override public void contextDestroyed(ServletContextEvent event) { destroy(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java index bce8c3b0d9..f126032976 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java @@ -29,6 +29,7 @@ public BooleanParam(String name, Boolean defaultValue) { super(name, defaultValue); } + @Override protected Boolean parse(String str) throws Exception { if (str.equalsIgnoreCase("true")) { return true; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java index b0e1173572..bc2c4a54c0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java @@ -27,6 +27,7 @@ public ByteParam(String name, Byte defaultValue) { super(name, defaultValue); } + @Override protected Byte parse(String str) throws Exception { return Byte.parseByte(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java index d76db629b2..8baef67e8c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java @@ -32,6 +32,7 @@ public EnumParam(String name, Class e, E defaultValue) { klass = e; } + @Override protected E parse(String str) throws Exception { return Enum.valueOf(klass, str.toUpperCase()); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java index faa99a440e..b7b08f6a9b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java @@ -27,6 +27,7 @@ public IntegerParam(String name, Integer defaultValue) { super(name, defaultValue); } + @Override protected Integer parse(String str) throws Exception { return
Integer.parseInt(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java index c2399bf76b..11bf082060 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java @@ -27,6 +27,7 @@ public LongParam(String name, Long defaultValue) { super(name, defaultValue); } + @Override protected Long parse(String str) throws Exception { return Long.parseLong(str); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java index f73c52fd5a..8af5373a3e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java @@ -55,6 +55,7 @@ public T value() { protected abstract T parse(String str) throws Exception; + @Override public String toString() { return (value != null) ? value.toString() : "NULL"; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java index 7986e72bdb..7d700c1744 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java @@ -34,6 +34,7 @@ public ShortParam(String name, Short defaultValue) { this(name, defaultValue, 10); } + @Override protected Short parse(String str) throws Exception { return Short.parseShort(str, radix); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java index 85bee1c901..1695eb3aa2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java @@ -36,6 +36,7 @@ public StringParam(String name, String defaultValue, Pattern pattern) { parseParam(defaultValue); } + @Override public String parseParam(String str) { try { if (str != null) { @@ -52,6 +53,7 @@ public String parseParam(String str) { return value; } + @Override protected String parse(String str) throws Exception { if (pattern != null) { if (!pattern.matcher(str).matches()) { diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java index 87b1420996..0cb0cc64b3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java @@ -47,14 +47,17 @@ public TestHttpFSFileSystemLocalFileSystem(Operation operation) { super(operation); } + @Override protected Path getProxiedFSTestDir() { return addPrefix(new 
Path(TestDirHelper.getTestDir().getAbsolutePath())); } + @Override protected String getProxiedFSURI() { return "file:///"; } + @Override protected Configuration getProxiedFSConf() { Configuration conf = new Configuration(false); conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, getProxiedFSURI()); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java index fa0a7555a7..b211e9a466 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSWithHttpFSFileSystem.java @@ -32,19 +32,23 @@ public TestHttpFSWithHttpFSFileSystem(Operation operation) { super(operation); } + @Override protected Class getFileSystemClass() { return HttpFSFileSystem.class; } + @Override protected Path getProxiedFSTestDir() { return TestHdfsHelper.getHdfsTestDir(); } + @Override protected String getProxiedFSURI() { return TestHdfsHelper.getHdfsConf().get( CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY); } + @Override protected Configuration getProxiedFSConf() { return TestHdfsHelper.getHdfsConf(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java index a32671854c..db4cdeeadb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSKerberosAuthenticationHandler.java @@ -34,7 +34,6 @@ import org.apache.hadoop.test.HFSTestCase; import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDirHelper; -import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.junit.Test; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java index f4996de542..eb2cdc6142 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHFSTestCase.java @@ -79,6 +79,7 @@ public void testDirAnnotation() throws Exception { public void waitFor() { long start = Time.now(); long waited = waitFor(1000, new Predicate() { + @Override public boolean evaluate() throws Exception { return true; } @@ -93,6 +94,7 @@ public void waitForTimeOutRatio1() { setWaitForRatio(1); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } @@ -107,6 +109,7 @@ public void waitForTimeOutRatio2() { setWaitForRatio(2); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java index 10c798f3fa..74d34ec80e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHTestCase.java @@ -64,6 +64,7 @@ public void testDirAnnotation() throws Exception { public void waitFor() { long start = Time.now(); long waited = waitFor(1000, new Predicate() { + @Override public boolean evaluate() throws Exception { return true; } @@ -78,6 +79,7 @@ public void waitForTimeOutRatio1() { setWaitForRatio(1); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } @@ -92,6 +94,7 @@ public void waitForTimeOutRatio2() { setWaitForRatio(2); long start = Time.now(); long waited = waitFor(200, new Predicate() { + @Override public boolean evaluate() throws Exception { return false; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java index 2afd7d35a4..26d253fecb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java @@ -31,6 +31,7 @@ public class TestHdfsHelper extends TestDirHelper { + @Override @Test public void dummy() { } diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index ba2a8b7564..0f4c5263d1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -126,6 +126,9 @@ Trunk (unreleased changes) HDFS-3819. Should check whether invalidate work percentage default value is not greater than 1.0f. (Jing Zhao via jitendra) + HDFS-3844. Add @Override and remove {@inheritDoc} and unnecessary
(Jing Zhao via suresh) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java index 2386c84130..222d454a70 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/fs/Hdfs.java @@ -312,9 +312,6 @@ public FileStatus[] listStatus(Path f) return listing.toArray(new FileStatus[listing.size()]); } - /** - * {@inheritDoc} - */ @Override public RemoteIterator listCorruptFileBlocks(Path path) throws IOException { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java index 4150c5c6fe..438d56e52f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java @@ -23,7 +23,6 @@ import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.security.KeyStore; import java.security.cert.X509Certificate; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java index 39a9b3086a..c24a59b87d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/RemoteBlockReader2.java @@ -23,7 +23,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; @@ -35,7 +34,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil; -import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferEncryptor; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader; import org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver; @@ -47,8 +45,6 @@ import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException; -import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; -import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.SocketInputWrapper; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java index f3575c4caa..ac6adfefb6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java @@ -118,7 +118,6 @@ static boolean isEqual(Object a, Object b) { return a == 
null ? b == null : a.equals(b); } - /** {@inheritDoc} */ @Override public boolean equals(Object obj) { if (obj == this) { @@ -135,7 +134,6 @@ && isEqual(this.blockPoolId, that.blockPoolId) return false; } - /** {@inheritDoc} */ @Override public int hashCode() { return (int) expiryDate ^ keyId ^ (int) blockId ^ modes.hashCode() diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java index 1558b4a8f5..01ee2a1222 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java @@ -59,7 +59,6 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.FSClusterStats; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.Namesystem; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyWithNodeGroup.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyWithNodeGroup.java index d7c4d8a305..00b0b0723b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyWithNodeGroup.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicyWithNodeGroup.java @@ -101,9 +101,6 @@ protected DatanodeDescriptor chooseLocalNode( blocksize, maxNodesPerRack, results); } - /** - * {@inheritDoc} - */ @Override protected void adjustExcludedNodes(HashMap excludedNodes, Node chosenNode) { @@ -121,9 +118,6 @@ private void addNodeGroupToExcludedNodes(HashMap excludedNodes, } } - /** - * {@inheritDoc} - */ @Override protected DatanodeDescriptor chooseLocalRack( DatanodeDescriptor localMachine, @@ -172,9 +166,6 @@ protected DatanodeDescriptor chooseLocalRack( } } - /** - * {@inheritDoc} - */ @Override protected void chooseRemoteRack(int numOfReplicas, DatanodeDescriptor localMachine, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java index 2cab5e207f..f7594284a4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java @@ -50,7 +50,6 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.BlockTargetPair; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.Namesystem; import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java index 8f921bde1f..73926010a4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HeartbeatManager.java @@ -26,7 +26,6 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DatanodeID; -import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.Namesystem; import org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.Time; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java index b7da116048..860d1d261f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java @@ -22,11 +22,7 @@ import java.util.Queue; import org.apache.hadoop.hdfs.protocol.Block; -import org.apache.hadoop.hdfs.server.blockmanagement.PendingDataNodeMessages.ReportedBlockInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState; -import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; - -import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java index 60a1216d12..831f3430be 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java @@ -44,7 +44,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.BlockReader; import org.apache.hadoop.hdfs.BlockReaderFactory; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; @@ -60,14 +59,12 @@ import org.apache.hadoop.hdfs.web.resources.DoAsParam; import org.apache.hadoop.hdfs.web.resources.UserParam; import org.apache.hadoop.http.HtmlQuoting; -import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.authentication.util.KerberosName; -import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.VersionInfo; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java index ca596a2b0e..f89fbde121 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java @@ -859,6 +859,7 @@ public interface FormatConfirmable { * @return a string representation of the formattable item, suitable * for display to the user inside a prompt */ + @Override public String toString(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java index 2d1ff6437b..4393ec7bca 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java @@ -16,8 +16,6 @@ */ package org.apache.hadoop.hdfs.server.datanode; -import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; - import java.net.InetSocketAddress; import java.net.ServerSocket; import java.nio.channels.ServerSocketChannel; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java index de80f80cf2..3816dc10d4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java @@ -22,9 +22,6 @@ import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - /** * A generic abstract class to support reading edits log data from * persistent storage. 
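The Storage.java hunk just above is the subtle case in this cleanup: an @Override added to a method declared in an interface. Since Java 6 this is accepted when the method is override-equivalent to a public method of java.lang.Object, such as toString(), because every interface implicitly declares the public instance methods of Object. A minimal sketch of the rule, using hypothetical names (only PrivilegedExceptionAction-style standard types are real here):

    // Redeclaring toString() in an interface overrides the implicit
    // member inherited from java.lang.Object, so @Override is legal.
    interface Confirmable {
      @Override
      String toString();
    }

    class StorageDirectoryStub implements Confirmable {
      private final String root;

      StorageDirectoryStub(String root) {
        this.root = root;
      }

      // The annotation makes the compiler reject signature drift,
      // e.g. an accidental toString(Object) overload.
      @Override
      public String toString() {
        return "Storage directory " + root;
      }
    }

The same Java 6 relaxation is what allows @Override on implementations of interface methods, which most of the remaining hunks rely on.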
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java index 2b4d3cba91..ad7f71cfe9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java @@ -29,12 +29,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LayoutVersion; -import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction; import org.apache.hadoop.hdfs.server.common.Storage; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java index 1a2b05a793..7dac687fc6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.protocol.Block; -import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.LayoutVersion; import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java index 66679b05fb..e1882d9481 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java @@ -43,7 +43,6 @@ import org.apache.hadoop.hdfs.protocol.LayoutVersion; import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; -import org.apache.hadoop.hdfs.server.common.GenerationStamp; import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java index d6453fa8b5..a8df0f706c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hdfs.protocol.Block; import 
org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction; -import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.ShortWritable; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java index 44b0437d13..7090f455d8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java @@ -18,13 +18,8 @@ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN; -import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY; -import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY; -import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT; - import java.io.IOException; import java.net.InetSocketAddress; -import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; @@ -34,7 +29,6 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; import org.apache.hadoop.hdfs.web.AuthFilter; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java index 7a30869290..eb6a8ea1c5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java @@ -20,13 +20,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; - -import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hdfs.protocol.HdfsConstants; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java index e12ce698f3..6897e353ff 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java @@ -75,7 +75,7 @@ T get(int i) { return t; } - /** {@inheritDoc} */ + @Override public String toString() { return "max=" + max + ",\n t2i=" + t2i + ",\n i2t=" + i2t; } diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java index 8030f2817e..3fd1dc26a0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hdfs.tools.offlineEditsViewer; -import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java index 2aade9eb14..0c8ac6353c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java @@ -31,13 +31,11 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization; import org.apache.hadoop.hdfs.tools.offlineImageViewer.ImageVisitor.ImageElement; -import org.apache.hadoop.hdfs.util.XMLUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; import org.apache.hadoop.security.token.delegation.DelegationKey; -import org.xml.sax.helpers.AttributesImpl; /** * ImageLoaderCurrent processes Hadoop FSImage files and walks over diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java index 4685a2e6ae..6045615edc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/CyclicIteration.java @@ -53,7 +53,6 @@ public CyclicIteration(NavigableMap navigablemap, K startingkey) { } } - /** {@inheritDoc} */ @Override public Iterator> iterator() { return new CyclicIterator(); @@ -89,13 +88,11 @@ private Map.Entry nextEntry() { return i.next(); } - /** {@inheritDoc} */ @Override public boolean hasNext() { return hasnext; } - /** {@inheritDoc} */ @Override public Map.Entry next() { if (!hasnext) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java index 4724595d4a..893e0b7cb6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java @@ -271,7 +271,6 @@ public void run(DatanodeID id) throws IOException { } } - /** {@inheritDoc} */ @Override public String toString() { return error + " " + super.toString(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java index 8f4f9c2be6..888fadf5dd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java @@ -51,9 +51,6 @@ public ReceivedCheckAction(String name) { this.name = name; } - /** - * {@inheritDoc} - */ @Override public void run(NodeBytes nb) throws IOException { synchronized (rcv) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java index bea29f9c67..cec0c594a7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSMkdirs.java @@ -19,7 +19,6 @@ import static org.junit.Assert.*; -import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -29,9 +28,7 @@ import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; -import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Time; import org.junit.Test; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGenericJournalConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGenericJournalConf.java index a941ae424d..43bd7a4153 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGenericJournalConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGenericJournalConf.java @@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.server.common.Storage.FormatConfirmable; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.junit.Test; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java index 6119584c0d..f3925c963c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java @@ -23,10 +23,7 @@ import org.junit.Test; import org.junit.Before; -import org.junit.After; - import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileUtil; @@ -35,9 +32,6 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; -import java.util.Properties; -import java.io.FileReader; -import java.io.FileWriter; import org.junit.Assert; import org.apache.hadoop.test.GenericTestUtils;
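Taken together, the hunks above apply one convention: annotate every interface implementation and superclass override with @Override, and drop the bare /** {@inheritDoc} */ comments, since javadoc inherits the supertype's documentation automatically when a method carries no comment of its own. A self-contained sketch of the before-and-after, wrapping the real java.security.PrivilegedExceptionAction interface that appears throughout this patch in a hypothetical named class:

    import java.security.PrivilegedExceptionAction;

    // Hypothetical example of the convention this patch enforces.
    class WhoAmIAction implements PrivilegedExceptionAction<String> {
      // Before the cleanup: a bare /** {@inheritDoc} */ comment and no
      // annotation. After: @Override, so a typo in the method name or a
      // signature change in the interface becomes a compile error
      // instead of a silently unused method.
      @Override
      public String run() throws Exception {
        return System.getProperty("user.name");
      }
    }

    // Used the same way as the anonymous classes in the tests above,
    // e.g. ugi.doAs(new WhoAmIAction()).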