From 092ebdf885468a2bf79cbfb168286b7cddc4a0db Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Fri, 23 Jun 2017 10:28:58 +0900
Subject: [PATCH] HADOOP-12940. Fix warnings from Spotbugs in hadoop-common.

---
 .../MultiSchemeAuthenticationHandler.java     | 11 ++++----
 .../dev-support/findbugsExcludeFile.xml       | 26 +++++++++++++++++++
 .../java/org/apache/hadoop/fs/FileUtil.java   |  6 +++--
 .../apache/hadoop/fs/RawLocalFileSystem.java  | 17 +++++++-----
 .../fs/shell/CommandWithDestination.java      |  2 +-
 .../org/apache/hadoop/io/DoubleWritable.java  |  4 +--
 .../org/apache/hadoop/io/FloatWritable.java   |  6 ++---
 .../java/org/apache/hadoop/io/IOUtils.java    |  9 ++++---
 .../hadoop/io/erasurecode/ECSchema.java       |  4 +--
 .../apache/hadoop/io/file/tfile/Utils.java    |  2 +-
 .../ZKDelegationTokenSecretManager.java       |  8 ++----
 .../apache/hadoop/util/SysInfoWindows.java    |  6 ++---
 .../org/apache/hadoop/minikdc/MiniKdc.java    |  7 +++--
 13 files changed, 70 insertions(+), 38 deletions(-)

diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java
index aa491003bb..58a0adb237 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java
@@ -186,11 +186,12 @@ public AuthenticationToken authenticate(HttpServletRequest request,
     String authorization =
         request.getHeader(HttpConstants.AUTHORIZATION_HEADER);
     if (authorization != null) {
-      for (String scheme : schemeToAuthHandlerMapping.keySet()) {
-        if (AuthenticationHandlerUtil.matchAuthScheme(scheme, authorization)) {
-          AuthenticationHandler handler =
-              schemeToAuthHandlerMapping.get(scheme);
-          AuthenticationToken token = handler.authenticate(request, response);
+      for (Map.Entry<String, AuthenticationHandler> entry :
+          schemeToAuthHandlerMapping.entrySet()) {
+        if (AuthenticationHandlerUtil.matchAuthScheme(
+            entry.getKey(), authorization)) {
+          AuthenticationToken token =
+              entry.getValue().authenticate(request, response);
           logger.trace("Token generated with type {}", token.getType());
           return token;
         }
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index cdd88f3c78..de76afbcbc 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -416,4 +416,30 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 0b27a7e326..b656a878e6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -115,8 +115,10 @@ public static void fullyDeleteOnExit(final File file) {
     file.deleteOnExit();
     if (file.isDirectory()) {
       File[] files = file.listFiles();
-      for (File child : files) {
-        fullyDeleteOnExit(child);
+      if (files != null) {
+        for (File child : files) {
+          fullyDeleteOnExit(child);
+        }
       }
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index e4e0659b13..721d0c058a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -384,13 +384,16 @@ public final boolean handleEmptyDstDirectoryOnWindows(Path src, File srcFile,
     // again.
     try {
       FileStatus sdst = this.getFileStatus(dst);
-      if (sdst.isDirectory() && dstFile.list().length == 0) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Deleting empty destination and renaming " + src + " to " +
-              dst);
-        }
-        if (this.delete(dst, false) && srcFile.renameTo(dstFile)) {
-          return true;
+      String[] dstFileList = dstFile.list();
+      if (dstFileList != null) {
+        if (sdst.isDirectory() && dstFileList.length == 0) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Deleting empty destination and renaming " + src +
+                " to " + dst);
+          }
+          if (this.delete(dst, false) && srcFile.renameTo(dstFile)) {
+            return true;
+          }
         }
       }
     } catch (FileNotFoundException ignored) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
index 578d22b48c..2a483c0a50 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
@@ -501,7 +501,7 @@ FSDataOutputStream create(PathData item, boolean lazyPersist,
             createFlags,
             getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
                 IO_FILE_BUFFER_SIZE_DEFAULT),
-            lazyPersist ? 1 : getDefaultReplication(item.path),
+            (short) 1,
             getDefaultBlockSize(),
             null,
             null);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
index 5cc326fe3c..f45ed0a82e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
@@ -75,7 +75,7 @@ public int hashCode() {
 
   @Override
   public int compareTo(DoubleWritable o) {
-    return (value < o.value ? -1 : (value == o.value ? 0 : 1));
+    return Double.compare(value, o.value);
   }
 
   @Override
@@ -94,7 +94,7 @@ public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       double thisValue = readDouble(b1, s1);
       double thatValue = readDouble(b2, s2);
-      return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+      return Double.compare(thisValue, thatValue);
     }
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
index 21e4cc4f5b..367fc946da 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
@@ -66,9 +66,7 @@ public int hashCode() {
 
   /** Compares two FloatWritables. */
   @Override
   public int compareTo(FloatWritable o) {
-    float thisValue = this.value;
-    float thatValue = o.value;
-    return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+    return Float.compare(value, o.value);
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ ... @@ public static List<String> listDirectory(File dir, FilenameFilter filter)
       try (DirectoryStream<Path> stream =
           Files.newDirectoryStream(dir.toPath())) {
         for (Path entry: stream) {
-          String fileName = entry.getFileName().toString();
-          if ((filter == null) || filter.accept(dir, fileName)) {
-            list.add(fileName);
+          Path fileName = entry.getFileName();
+          if (fileName != null) {
+            String fileNameStr = fileName.toString();
+            if ((filter == null) || filter.accept(dir, fileNameStr)) {
+              list.add(fileNameStr);
+            }
           }
         }
       } catch (DirectoryIteratorException e) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
index e55fbdd876..4d66019a35 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
@@ -189,8 +189,8 @@ public String toString() {
     sb.append((extraOptions.isEmpty() ? "" : ", "));
 
     int i = 0;
-    for (String opt : extraOptions.keySet()) {
-      sb.append(opt + "=" + extraOptions.get(opt) +
+    for (Map.Entry<String, String> entry : extraOptions.entrySet()) {
+      sb.append(entry.getKey() + "=" + entry.getValue() +
           (++i < extraOptions.size() ? ", " : ""));
     }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
index 5743c66798..8cb6e0d95c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
@@ -395,7 +395,7 @@ public boolean equals(Object other) {
 
   @Override
   public int hashCode() {
-    return (major << 16 + minor);
+    return (major << 16) + minor;
   }
 }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
index 4a7ddb2465..88bd29bd73 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
@@ -881,11 +881,9 @@ private void addOrUpdateToken(TokenIdent ident,
     String nodeCreatePath =
         getNodePath(ZK_DTSM_TOKENS_ROOT, DELEGATION_TOKEN_PREFIX
             + ident.getSequenceNumber());
-    ByteArrayOutputStream tokenOs = new ByteArrayOutputStream();
-    DataOutputStream tokenOut = new DataOutputStream(tokenOs);
-    ByteArrayOutputStream seqOs = new ByteArrayOutputStream();
 
-    try {
+    try (ByteArrayOutputStream tokenOs = new ByteArrayOutputStream();
+        DataOutputStream tokenOut = new DataOutputStream(tokenOs)) {
       ident.write(tokenOut);
       tokenOut.writeLong(info.getRenewDate());
       tokenOut.writeInt(info.getPassword().length);
@@ -902,8 +900,6 @@ private void addOrUpdateToken(TokenIdent ident,
         zkClient.create().withMode(CreateMode.PERSISTENT)
             .forPath(nodeCreatePath, tokenOs.toByteArray());
       }
-    } finally {
-      seqOs.close();
     }
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
index 8b4ea54602..bce2d6d3b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
@@ -169,7 +169,7 @@ public long getAvailablePhysicalMemorySize() {
 
   /** {@inheritDoc} */
   @Override
-  public int getNumProcessors() {
+  public synchronized int getNumProcessors() {
     refreshIfNeeded();
     return numProcessors;
   }
@@ -196,7 +196,7 @@ public long getCumulativeCpuTime() {
 
   /** {@inheritDoc} */
   @Override
-  public float getCpuUsagePercentage() {
+  public synchronized float getCpuUsagePercentage() {
     refreshIfNeeded();
     float ret = cpuUsage;
     if (ret != -1) {
@@ -207,7 +207,7 @@ public float getCpuUsagePercentage() {
 
   /** {@inheritDoc} */
   @Override
-  public float getNumVCoresUsed() {
+  public synchronized float getNumVCoresUsed() {
     refreshIfNeeded();
     float ret = cpuUsage;
     if (ret != -1) {
diff --git a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
index fe194c00e4..6f91018adb 100644
--- a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
+++ b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
@@ -365,8 +365,11 @@ private void delete(File f) {
         LOG.warn("WARNING: cannot delete file " + f.getAbsolutePath());
       }
     } else {
-      for (File c: f.listFiles()) {
-        delete(c);
+      File[] fileList = f.listFiles();
+      if (fileList != null) {
+        for (File c : fileList) {
+          delete(c);
+        }
       }
       if (! f.delete()) {
         LOG.warn("WARNING: cannot delete directory " + f.getAbsolutePath());