From 618c9218eeed2dc0388010e04349b3df8d6c5b70 Mon Sep 17 00:00:00 2001 From: Viraj Jasani Date: Thu, 8 Jul 2021 12:33:40 +0530 Subject: [PATCH] HADOOP-17788. Replace IOUtils#closeQuietly usages by Hadoop's own utility (#3171) Reviewed-by: Steve Loughran Reviewed-by: Akira Ajisaka Signed-off-by: Takanobu Asanuma --- .../TestZStandardCompressorDecompressor.java | 36 +++++++------------ .../apache/hadoop/test/GenericTestUtils.java | 17 ++++----- .../apache/hadoop/util/TestSysInfoLinux.java | 9 ++--- .../hadoop/crypto/key/kms/server/MiniKMS.java | 6 ++-- .../apache/hadoop/hdfs/DFSInputStream.java | 6 ++-- .../apache/hadoop/hdfs/client/HdfsUtils.java | 13 ++----- .../shortcircuit/DomainSocketFactory.java | 4 +-- .../hdfs/server/datanode/BlockSender.java | 4 +-- .../server/datanode/DataXceiverServer.java | 10 +++--- .../hdfs/server/datanode/FileIoProvider.java | 10 +++--- .../server/datanode/ShortCircuitRegistry.java | 6 ++-- .../fsdataset/impl/FsDatasetCache.java | 10 +++--- .../fsdataset/impl/MappableBlockLoader.java | 7 +--- .../impl/MemoryMappableBlockLoader.java | 6 +--- .../impl/NativePmemMappableBlockLoader.java | 14 ++------ .../impl/PmemMappableBlockLoader.java | 6 ++-- .../fsdataset/impl/PmemVolumeManager.java | 4 +-- .../hdfs/server/namenode/CacheManager.java | 4 +-- .../hdfs/server/namenode/NamenodeFsck.java | 4 +-- .../hdfs/client/impl/BlockReaderTestUtil.java | 4 +-- .../fsdataset/impl/LazyPersistTestCase.java | 23 +++++------- .../impl/TestScrLazyPersistFiles.java | 7 ++-- .../fsdataset/impl/TestSpaceReservation.java | 7 ++-- .../hadoop/mapreduce/v2/app/MRAppMaster.java | 20 ++++------- .../distributedshell/ApplicationMaster.java | 7 ++-- .../applications/distributedshell/Client.java | 9 ++--- .../distributedshell/Log4jPropertyHelper.java | 13 ++----- .../hadoop/yarn/client/cli/LogsCLI.java | 4 +-- .../client/api/impl/TestYarnClientImpl.java | 4 +-- .../hadoop/yarn/client/cli/TestLogsCLI.java | 14 +++----- .../yarn/logaggregation/LogCLIHelpers.java | 4 +-- .../ifile/IndexedFileAggregatedLogsBlock.java | 5 +-- .../yarn/util/ProcfsBasedProcessTree.java | 2 +- ...stLogAggregationIndexedFileController.java | 8 ++--- .../nodemanager/webapp/NMWebServices.java | 4 +-- .../container/TestResourceMappings.java | 9 ++--- .../TestConfigurationNodeLabelsProvider.java | 4 +-- ...ServicesDelegationTokenAuthentication.java | 17 +++++---- .../TimelineReaderWebServicesUtils.java | 4 +-- 39 files changed, 130 insertions(+), 215 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java index dcfb7e9e32..f12226d897 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java @@ -16,10 +16,10 @@ package org.apache.hadoop.io.compress.zstd; import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.CompressionInputStream; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; @@ -198,18 +198,16 @@ 
public void testSetInputWithBytesSizeMoreThenDefaultZStandardBufferSize() @Test public void testCompressorDecompressorLogicWithCompressionStreams() throws Exception { - DataOutputStream deflateOut = null; DataInputStream inflateIn = null; int byteSize = 1024 * 100; byte[] bytes = generate(byteSize); int bufferSize = IO_FILE_BUFFER_SIZE_DEFAULT; - try { - DataOutputBuffer compressedDataBuffer = new DataOutputBuffer(); - CompressionOutputStream deflateFilter = - new CompressorStream(compressedDataBuffer, new ZStandardCompressor(), - bufferSize); - deflateOut = - new DataOutputStream(new BufferedOutputStream(deflateFilter)); + DataOutputBuffer compressedDataBuffer = new DataOutputBuffer(); + CompressionOutputStream deflateFilter = + new CompressorStream(compressedDataBuffer, new ZStandardCompressor(), + bufferSize); + try (DataOutputStream deflateOut = + new DataOutputStream(new BufferedOutputStream(deflateFilter))) { deflateOut.write(bytes, 0, bytes.length); deflateOut.flush(); deflateFilter.finish(); @@ -229,8 +227,7 @@ public void testCompressorDecompressorLogicWithCompressionStreams() assertArrayEquals("original array not equals compress/decompressed array", result, bytes); } finally { - IOUtils.closeQuietly(deflateOut); - IOUtils.closeQuietly(inflateIn); + IOUtils.closeStream(inflateIn); } } @@ -358,18 +355,15 @@ public void testDecompressingOutput() throws Exception { codec.createDecompressor()); byte[] toDecompress = new byte[100]; - ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] decompressedResult; int totalFileSize = 0; - int result = toDecompress.length; - try { + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + int result = toDecompress.length; while ((result = inputStream.read(toDecompress, 0, result)) != -1) { baos.write(toDecompress, 0, result); totalFileSize += result; } decompressedResult = baos.toByteArray(); - } finally { - IOUtils.closeQuietly(baos); } assertEquals(decompressedResult.length, totalFileSize); @@ -435,20 +429,16 @@ public void testReadingWithAStream() throws Exception { ZStandardCodec codec = new ZStandardCodec(); codec.setConf(CONFIGURATION); Decompressor decompressor = codec.createDecompressor(); - CompressionInputStream cis = - codec.createInputStream(inputStream, decompressor); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] resultOfDecompression; - try { + try (CompressionInputStream cis = + codec.createInputStream(inputStream, decompressor); + ByteArrayOutputStream baos = new ByteArrayOutputStream()) { byte[] buffer = new byte[100]; int n; while ((n = cis.read(buffer, 0, buffer.length)) != -1) { baos.write(buffer, 0, n); } resultOfDecompression = baos.toByteArray(); - } finally { - IOUtils.closeQuietly(baos); - IOUtils.closeQuietly(cis); } byte[] expected = FileUtils.readFileToByteArray(uncompressedFile); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java index effb2608a2..4f6a625955 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java @@ -47,13 +47,13 @@ import java.util.function.Supplier; import java.util.regex.Pattern; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.BlockingThreadPoolExecutorService; import org.apache.hadoop.util.DurationInfo; import org.apache.hadoop.util.StringUtils; @@ -500,7 +500,7 @@ public String getOutput() { @Override public void close() throws Exception { - IOUtils.closeQuietly(bytesPrintStream); + IOUtils.closeStream(bytesPrintStream); System.setErr(oldErr); } } @@ -821,12 +821,10 @@ public static void assumeInNativeProfile() { */ public static String getFilesDiff(File a, File b) throws IOException { StringBuilder bld = new StringBuilder(); - BufferedReader ra = null, rb = null; - try { - ra = new BufferedReader( - new InputStreamReader(new FileInputStream(a))); - rb = new BufferedReader( - new InputStreamReader(new FileInputStream(b))); + try (BufferedReader ra = new BufferedReader( + new InputStreamReader(new FileInputStream(a))); + BufferedReader rb = new BufferedReader( + new InputStreamReader(new FileInputStream(b)))) { while (true) { String la = ra.readLine(); String lb = rb.readLine(); @@ -846,9 +844,6 @@ public static String getFilesDiff(File a, File b) throws IOException { bld.append(" + ").append(lb).append("\n"); } } - } finally { - IOUtils.closeQuietly(ra); - IOUtils.closeQuietly(rb); } return bld.toString(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoLinux.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoLinux.java index 0ae5d3ce8c..f8ef7f2aa3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoLinux.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoLinux.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; @@ -521,12 +519,9 @@ public void testCoreCounts() throws IOException { private void writeFakeCPUInfoFile(String content) throws IOException { File tempFile = new File(FAKE_CPUFILE); - FileWriter fWriter = new FileWriter(FAKE_CPUFILE); - tempFile.deleteOnExit(); - try { + try (FileWriter fWriter = new FileWriter(FAKE_CPUFILE)) { + tempFile.deleteOnExit(); fWriter.write(content); - } finally { - IOUtils.closeQuietly(fWriter); } } diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java index bc4bbc3df7..03208befc2 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java @@ -105,11 +105,9 @@ public MiniKMS(String kmsConfDir, String log4ConfFile, String keyStore, private void copyResource(String inputResourceName, File outputFile) throws IOException { - InputStream is = ThreadUtil.getResourceAsStream(inputResourceName); - try (OutputStream os = new FileOutputStream(outputFile)) { + try (InputStream is = ThreadUtil.getResourceAsStream(inputResourceName); + OutputStream os = new FileOutputStream(outputFile)) { IOUtils.copy(is, os); - } finally { - IOUtils.closeQuietly(is); } } diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java index 4e21133d7e..6e15251d66 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java @@ -44,7 +44,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.ByteBufferPositionedReadable; import org.apache.hadoop.fs.ByteBufferReadable; @@ -77,6 +76,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ClientMmap; import org.apache.hadoop.hdfs.util.IOUtilsClient; import org.apache.hadoop.io.ByteBufferPool; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; @@ -1919,7 +1919,7 @@ private synchronized ByteBuffer tryReadZeroCopy(int maxLength, success = true; } finally { if (!success) { - IOUtils.closeQuietly(clientMmap); + IOUtils.closeStream(clientMmap); } } return buffer; @@ -1934,7 +1934,7 @@ public synchronized void releaseBuffer(ByteBuffer buffer) { "that was not created by this stream, " + buffer); } if (val instanceof ClientMmap) { - IOUtils.closeQuietly((ClientMmap)val); + IOUtils.closeStream((ClientMmap)val); } else if (val instanceof ByteBufferPool) { ((ByteBufferPool)val).putBuffer(buffer); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java index 3b77a3f8ee..b4a81c189e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.net.URI; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -63,24 +62,18 @@ public static boolean isHealthy(URI uri) { conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0); - DistributedFileSystem fs = null; - try { - fs = (DistributedFileSystem)FileSystem.get(uri, conf); + try (DistributedFileSystem fs = + (DistributedFileSystem) FileSystem.get(uri, conf)) { final boolean safemode = fs.setSafeMode(SafeModeAction.SAFEMODE_GET); if (LOG.isDebugEnabled()) { LOG.debug("Is namenode in safemode? 
" + safemode + "; uri=" + uri); } - - fs.close(); - fs = null; return !safemode; - } catch(IOException e) { + } catch (IOException e) { if (LOG.isDebugEnabled()) { LOG.debug("Got an exception for uri=" + uri, e); } return false; - } finally { - IOUtils.closeQuietly(fs); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java index 4043639588..0fd9a13fa6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java @@ -21,8 +21,8 @@ import java.net.InetSocketAddress; import java.util.concurrent.TimeUnit; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; @@ -174,7 +174,7 @@ public DomainSocket createSocket(PathInfo info, int socketTimeout) { } finally { if (!success) { if (sock != null) { - IOUtils.closeQuietly(sock); + IOUtils.closeStream(sock); } pathMap.put(info.getPath(), PathState.UNUSABLE); sock = null; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java index bb75e3aceb..eb60ab2d77 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java @@ -434,8 +434,8 @@ class BlockSender implements java.io.Closeable { } catch (IOException ioe) { IOUtils.cleanupWithLogger(null, volumeRef); IOUtils.closeStream(this); - org.apache.commons.io.IOUtils.closeQuietly(blockIn); - org.apache.commons.io.IOUtils.closeQuietly(checksumIn); + IOUtils.closeStream(blockIn); + IOUtils.closeStream(checksumIn); throw ioe; } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer.java index b953760ea9..186bdb5e4f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiverServer.java @@ -27,12 +27,12 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.net.Peer; import org.apache.hadoop.hdfs.net.PeerServer; import org.apache.hadoop.hdfs.util.DataTransferThrottler; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.Daemon; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; @@ -248,10 +248,10 @@ public void run() { LOG.warn("{}:DataXceiverServer", datanode.getDisplayName(), ace); } } catch (IOException ie) { - IOUtils.closeQuietly(peer); + IOUtils.closeStream(peer); 
LOG.warn("{}:DataXceiverServer", datanode.getDisplayName(), ie); } catch (OutOfMemoryError ie) { - IOUtils.closeQuietly(peer); + IOUtils.closeStream(peer); // DataNode can run out of memory if there is too many transfers. // Log the event, Sleep for 30 seconds, other transfers may complete by // then. @@ -334,7 +334,7 @@ void closePeer(Peer peer) { peers.remove(peer); peersXceiver.remove(peer); datanode.metrics.decrDataNodeActiveXceiversCount(); - IOUtils.closeQuietly(peer); + IOUtils.closeStream(peer); if (peers.isEmpty()) { this.noPeers.signalAll(); } @@ -396,7 +396,7 @@ void closeAllPeers() { LOG.info("Closing all peers."); lock.lock(); try { - peers.keySet().forEach(p -> IOUtils.closeQuietly(p)); + peers.keySet().forEach(IOUtils::closeStream); peers.clear(); peersXceiver.clear(); datanode.metrics.setDataNodeActiveXceiversCount(0); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FileIoProvider.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FileIoProvider.java index cf6902912f..11577c3d7b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FileIoProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FileIoProvider.java @@ -336,7 +336,7 @@ public FileInputStream getFileInputStream( profilingEventHook.afterMetadataOp(volume, OPEN, begin); return fis; } catch(Exception e) { - org.apache.commons.io.IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); onFailure(volume, begin); throw e; } @@ -367,7 +367,7 @@ public FileOutputStream getFileOutputStream( profilingEventHook.afterMetadataOp(volume, OPEN, begin); return fos; } catch(Exception e) { - org.apache.commons.io.IOUtils.closeQuietly(fos); + IOUtils.closeStream(fos); onFailure(volume, begin); throw e; } @@ -432,7 +432,7 @@ public FileInputStream getShareDeleteFileInputStream( profilingEventHook.afterMetadataOp(volume, OPEN, begin); return fis; } catch(Exception e) { - org.apache.commons.io.IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); onFailure(volume, begin); throw e; } @@ -464,7 +464,7 @@ public FileInputStream openAndSeek( profilingEventHook.afterMetadataOp(volume, OPEN, begin); return fis; } catch(Exception e) { - org.apache.commons.io.IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); onFailure(volume, begin); throw e; } @@ -495,7 +495,7 @@ public RandomAccessFile getRandomAccessFile( profilingEventHook.afterMetadataOp(volume, OPEN, begin); return raf; } catch(Exception e) { - org.apache.commons.io.IOUtils.closeQuietly(raf); + IOUtils.closeStream(raf); onFailure(volume, begin); throw e; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java index cb8dfaf262..4b7d2c944d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ShortCircuitRegistry.java @@ -30,8 +30,8 @@ import java.util.Iterator; import java.util.Set; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; -import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -322,7 +322,7 @@ 
public NewShmInfo createNewMemorySegment(String clientName, shm = new RegisteredShm(clientName, shmId, fis, this); } finally { if (shm == null) { - IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); } } info = new NewShmInfo(shmId, fis); @@ -392,7 +392,7 @@ public void shutdown() { if (!enabled) return; enabled = false; } - IOUtils.closeQuietly(watcher); + IOUtils.closeStream(watcher); } public static interface Visitor { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java index facace2860..417e266474 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetCache.java @@ -23,6 +23,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_CACHE_REVOCATION_POLLING_MS; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_CACHE_REVOCATION_POLLING_MS_DEFAULT; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -44,7 +45,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.LongAdder; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.time.DurationFormatUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -473,15 +473,15 @@ public void run() { dataset.datanode.getMetrics().incrBlocksCached(1); success = true; } finally { - IOUtils.closeQuietly(blockIn); - IOUtils.closeQuietly(metaIn); + IOUtils.closeStream(blockIn); + IOUtils.closeStream(metaIn); if (!success) { if (reservedBytes) { cacheLoader.release(key, length); } LOG.debug("Caching of {} was aborted. 
We are now caching only {} " + "bytes in total.", key, cacheLoader.getCacheUsed()); - IOUtils.closeQuietly(mappableBlock); + IOUtils.closeStream(mappableBlock); numBlocksFailedToCache.increment(); synchronized (FsDatasetCache.this) { @@ -555,7 +555,7 @@ public void run() { Preconditions.checkNotNull(value); Preconditions.checkArgument(value.state == State.UNCACHING); - IOUtils.closeQuietly(value.mappableBlock); + IOUtils.closeStream(value.mappableBlock); synchronized (FsDatasetCache.this) { mappableBlockMap.remove(key); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlockLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlockLoader.java index 96d88345e6..a35c6f2a31 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlockLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlockLoader.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.ExtendedBlockId; @@ -136,9 +135,7 @@ protected void verifyChecksum(long length, FileInputStream metaIn, BlockMetadataHeader.readHeader(new DataInputStream( new BufferedInputStream(metaIn, BlockMetadataHeader .getHeaderSize()))); - FileChannel metaChannel = null; - try { - metaChannel = metaIn.getChannel(); + try (FileChannel metaChannel = metaIn.getChannel()) { if (metaChannel == null) { throw new IOException( "Block InputStream meta file has no FileChannel."); @@ -172,8 +169,6 @@ protected void verifyChecksum(long length, FileInputStream metaIn, blockBuf.clear(); checksumBuf.clear(); } - } finally { - IOUtils.closeQuietly(metaChannel); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MemoryMappableBlockLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MemoryMappableBlockLoader.java index a7853f3a18..eacf66cc74 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MemoryMappableBlockLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MemoryMappableBlockLoader.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.ExtendedBlockId; @@ -73,9 +72,7 @@ MappableBlock load(long length, FileInputStream blockIn, throws IOException { MemoryMappedBlock mappableBlock = null; MappedByteBuffer mmap = null; - FileChannel blockChannel = null; - try { - blockChannel = blockIn.getChannel(); + try (FileChannel blockChannel = blockIn.getChannel()) { if (blockChannel == null) { throw new IOException("Block InputStream has no FileChannel."); } @@ -84,7 +81,6 @@ MappableBlock load(long length, FileInputStream blockIn, verifyChecksum(length, metaIn, blockChannel, blockFileName); mappableBlock = new MemoryMappedBlock(mmap, length); } finally 
{ - IOUtils.closeQuietly(blockChannel); if (mappableBlock == null) { if (mmap != null) { NativeIO.POSIX.munmap(mmap); // unmapping also unlocks diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/NativePmemMappableBlockLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/NativePmemMappableBlockLoader.java index ec024cda9a..6a5810349e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/NativePmemMappableBlockLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/NativePmemMappableBlockLoader.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.ExtendedBlockId; @@ -83,9 +82,7 @@ public MappableBlock load(long length, FileInputStream blockIn, POSIX.PmemMappedRegion region = null; String filePath = null; - FileChannel blockChannel = null; - try { - blockChannel = blockIn.getChannel(); + try (FileChannel blockChannel = blockIn.getChannel()) { if (blockChannel == null) { throw new IOException("Block InputStream has no FileChannel."); } @@ -102,10 +99,9 @@ public MappableBlock load(long length, FileInputStream blockIn, mappableBlock = new NativePmemMappedBlock(region.getAddress(), region.getLength(), key); LOG.info("Successfully cached one replica:{} into persistent memory" - + ", [cached path={}, address={}, length={}]", key, filePath, + + ", [cached path={}, address={}, length={}]", key, filePath, region.getAddress(), length); } finally { - IOUtils.closeQuietly(blockChannel); if (mappableBlock == null) { if (region != null) { // unmap content from persistent memory @@ -131,9 +127,7 @@ private void verifyChecksumAndMapBlock(POSIX.PmemMappedRegion region, BlockMetadataHeader.readHeader(new DataInputStream( new BufferedInputStream(metaIn, BlockMetadataHeader .getHeaderSize()))); - FileChannel metaChannel = null; - try { - metaChannel = metaIn.getChannel(); + try (FileChannel metaChannel = metaIn.getChannel()) { if (metaChannel == null) { throw new IOException("Cannot get FileChannel" + " from Block InputStream meta file."); @@ -181,8 +175,6 @@ private void verifyChecksumAndMapBlock(POSIX.PmemMappedRegion region, if (region != null) { POSIX.Pmem.memSync(region); } - } finally { - IOUtils.closeQuietly(metaChannel); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemMappableBlockLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemMappableBlockLoader.java index e8c6ac158e..78108818ac 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemMappableBlockLoader.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemMappableBlockLoader.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hdfs.ExtendedBlockId; 
import org.apache.hadoop.hdfs.server.datanode.DNConf; +import org.apache.hadoop.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -105,8 +105,8 @@ MappableBlock load(long length, FileInputStream blockIn, LOG.info("Successfully cached one replica:{} into persistent memory" + ", [cached path={}, length={}]", key, cachePath, length); } finally { - IOUtils.closeQuietly(blockChannel); - IOUtils.closeQuietly(cacheFile); + IOUtils.closeStream(blockChannel); + IOUtils.closeStream(cacheFile); if (mappableBlock == null) { LOG.debug("Delete {} due to unsuccessful mapping.", cachePath); FsDatasetUtil.deleteMappedFile(cachePath); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java index a85c577745..8f2a2ed974 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -357,7 +357,7 @@ static File verifyIfValidPmemVolume(File pmemDir) out.clear(); } if (testFile != null) { - IOUtils.closeQuietly(testFile); + IOUtils.closeStream(testFile); NativeIO.POSIX.munmap(out); try { FsDatasetUtil.deleteMappedFile(testFilePath); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java index a937e0aef4..6d5d65204a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java @@ -44,7 +44,6 @@ import java.util.concurrent.locks.ReentrantLock; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries; @@ -83,6 +82,7 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step; import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType; import org.apache.hadoop.hdfs.util.ReadOnlyList; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.GSet; import org.apache.hadoop.util.LightWeightGSet; @@ -281,7 +281,7 @@ public void stopMonitorThread() { if (this.monitor != null) { CacheReplicationMonitor prevMonitor = this.monitor; this.monitor = null; - IOUtils.closeQuietly(prevMonitor); + IOUtils.closeStream(prevMonitor); } } finally { crmLock.unlock(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java index 578c9ebb5a..9568ecb5f3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java @@ -37,8 +37,8 @@ import java.util.Set; import java.util.concurrent.ThreadLocalRandom; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeature; +import org.apache.hadoop.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -1109,7 +1109,7 @@ public Peer newConnectedPeer(InetSocketAddress addr, blockToken, datanodeId, HdfsConstants.READ_TIMEOUT); } finally { if (peer == null) { - IOUtils.closeQuietly(s); + IOUtils.closeStream(s); } } return peer; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java index 71f71208d3..826299b0bf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Random; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.BlockReader; @@ -57,6 +56,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitCache; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplica; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.GenericTestUtils; @@ -220,7 +220,7 @@ public Peer newConnectedPeer(InetSocketAddress addr, peer = DFSUtilClient.peerFromSocket(sock); } finally { if (peer == null) { - IOUtils.closeQuietly(sock); + IOUtils.closeStream(sock); } } return peer; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java index cb26533229..9774fea1c9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java @@ -45,8 +45,8 @@ import java.util.UUID; import java.util.concurrent.TimeoutException; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; @@ -151,7 +151,7 @@ public void shutDownCluster() throws Exception { jmx = null; } - IOUtils.closeQuietly(sockDir); + IOUtils.closeStream(sockDir); sockDir = null; } @@ -252,16 +252,13 @@ protected final void makeTestFile(Path path, long length, createFlags.add(LAZY_PERSIST); } - FSDataOutputStream fos = null; - try { 
- fos = - fs.create(path, - FsPermission.getFileDefault(), - createFlags, - BUFFER_LENGTH, - REPL_FACTOR, - BLOCK_SIZE, - null); + try (FSDataOutputStream fos = fs.create(path, + FsPermission.getFileDefault(), + createFlags, + BUFFER_LENGTH, + REPL_FACTOR, + BLOCK_SIZE, + null)) { // Allocate a block. byte[] buffer = new byte[BUFFER_LENGTH]; @@ -272,8 +269,6 @@ protected final void makeTestFile(Path path, long length, if (length > 0) { fos.hsync(); } - } finally { - IOUtils.closeQuietly(fos); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java index f7aa4c3412..efc203b730 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java @@ -16,8 +16,8 @@ * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; + import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.ClientContext; @@ -125,8 +125,7 @@ public void tesScrDuringEviction() ensureFileReplicasOnStorageType(path1, RAM_DISK); waitForMetric("RamDiskBlocksLazyPersisted", 1); - HdfsDataInputStream fis = (HdfsDataInputStream) fs.open(path1); - try { + try (HdfsDataInputStream fis = (HdfsDataInputStream) fs.open(path1)) { // Keep and open read handle to path1 while creating path2 byte[] buf = new byte[BUFFER_LENGTH]; fis.read(0, buf, 0, BUFFER_LENGTH); @@ -138,8 +137,6 @@ public void tesScrDuringEviction() is((long) 2 * BUFFER_LENGTH)); assertThat(fis.getReadStatistics().getTotalShortCircuitBytesRead(), is((long) 2 * BUFFER_LENGTH)); - } finally { - IOUtils.closeQuietly(fis); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java index 5bb67882a8..a702cec7cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java @@ -19,7 +19,8 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl; import java.util.function.Supplier; -import org.apache.commons.io.IOUtils; + +import org.apache.hadoop.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -554,7 +555,7 @@ public void run() { String filename = "/file-" + rand.nextLong(); os = localClient.create(filename, false); os.write(data, 0, rand.nextInt(data.length)); - IOUtils.closeQuietly(os); + IOUtils.closeStream(os); os = null; localClient.delete(filename, false); Thread.sleep(50); // Sleep for a bit to avoid killing the system. 
@@ -566,7 +567,7 @@ public void run() { return; } finally { if (os != null) { - IOUtils.closeQuietly(os); + IOUtils.closeStream(os); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index 38884c901b..fe64845b6a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -41,7 +41,6 @@ import javax.crypto.KeyGenerator; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -1474,9 +1473,8 @@ private void parsePreviousJobHistory() throws IOException { private List readJustAMInfos() { List amInfos = new ArrayList(); - FSDataInputStream inputStream = null; - try { - inputStream = getPreviousJobHistoryStream(getConfig(), appAttemptID); + try (FSDataInputStream inputStream = + getPreviousJobHistoryStream(getConfig(), appAttemptID)) { EventReader jobHistoryEventReader = new EventReader(inputStream); // All AMInfos are contiguous. Track when the first AMStartedEvent @@ -1492,11 +1490,11 @@ private List readJustAMInfos() { } AMStartedEvent amStartedEvent = (AMStartedEvent) event; amInfos.add(MRBuilderUtils.newAMInfo( - amStartedEvent.getAppAttemptId(), amStartedEvent.getStartTime(), - amStartedEvent.getContainerId(), - StringInterner.weakIntern(amStartedEvent.getNodeManagerHost()), - amStartedEvent.getNodeManagerPort(), - amStartedEvent.getNodeManagerHttpPort())); + amStartedEvent.getAppAttemptId(), amStartedEvent.getStartTime(), + amStartedEvent.getContainerId(), + StringInterner.weakIntern(amStartedEvent.getNodeManagerHost()), + amStartedEvent.getNodeManagerPort(), + amStartedEvent.getNodeManagerHttpPort())); } else if (amStartedEventsBegan) { // This means AMStartedEvents began and this event is a // non-AMStarted event. @@ -1507,10 +1505,6 @@ private List readJustAMInfos() { } catch (IOException e) { LOG.warn("Could not parse the old history file. 
" + "Will not have old AMinfos ", e); - } finally { - if (inputStream != null) { - IOUtils.closeQuietly(inputStream); - } } return amInfos; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 765ca82230..84c26486fb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -1619,12 +1619,9 @@ private boolean fileExist(String filePath) { } private String readContent(String filePath) throws IOException { - DataInputStream ds = null; - try { - ds = new DataInputStream(new FileInputStream(filePath)); + try (DataInputStream ds = new DataInputStream( + new FileInputStream(filePath))) { return ds.readUTF(); - } finally { - org.apache.commons.io.IOUtils.closeQuietly(ds); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index b271486bb9..42cd09a584 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -42,7 +42,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -1221,13 +1220,9 @@ private void addToLocalResources(FileSystem fs, String fileSrcPath, Path dst = new Path(fs.getHomeDirectory(), suffix); if (fileSrcPath == null) { - FSDataOutputStream ostream = null; - try { - ostream = FileSystem - .create(fs, dst, new FsPermission((short) 0710)); + try (FSDataOutputStream ostream = FileSystem.create(fs, dst, + new FsPermission((short) 0710))) { ostream.writeUTF(resources); - } finally { - IOUtils.closeQuietly(ostream); } } else { fs.copyFromLocalFile(new Path(fileSrcPath), dst); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Log4jPropertyHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Log4jPropertyHelper.java index cbfe16c6d4..0301a6880f 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Log4jPropertyHelper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Log4jPropertyHelper.java @@ -23,21 +23,17 @@ import java.util.Map.Entry; import java.util.Properties; -import org.apache.commons.io.IOUtils; import org.apache.log4j.LogManager; import org.apache.log4j.PropertyConfigurator; - public class Log4jPropertyHelper { public static void updateLog4jConfiguration(Class targetClass, String log4jPath) throws Exception { Properties customProperties = new Properties(); - FileInputStream fs = null; - InputStream is = null; - try { - fs = new FileInputStream(log4jPath); - is = targetClass.getResourceAsStream("/log4j.properties"); + try ( + FileInputStream fs = new FileInputStream(log4jPath); + InputStream is = targetClass.getResourceAsStream("/log4j.properties")) { customProperties.load(fs); Properties originalProperties = new Properties(); originalProperties.load(is); @@ -47,9 +43,6 @@ public static void updateLog4jConfiguration(Class targetClass, } LogManager.resetConfiguration(); PropertyConfigurator.configure(originalProperties); - }finally { - IOUtils.closeQuietly(is); - IOUtils.closeQuietly(fs); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index ca71f15c31..9921d3e26d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -18,6 +18,7 @@ package org.apache.hadoop.yarn.client.cli; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientHandlerException; @@ -55,7 +56,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.math3.util.Pair; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -643,7 +643,7 @@ public int printContainerLogsFromRunningApplication(Configuration conf, + " for the container:" + containerIdStr + " in NodeManager:" + nodeId); } finally { - IOUtils.closeQuietly(is); + IOUtils.closeStream(is); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClientImpl.java index cded253333..947fa9985e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClientImpl.java @@ -18,7 +18,6 @@ package org.apache.hadoop.yarn.client.api.impl; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; 
import org.apache.hadoop.fs.FileSystemTestHelper; @@ -28,6 +27,7 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.io.DataInputByteBuffer; import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.security.Credentials; @@ -119,7 +119,7 @@ private void testAsyncAPIPollTimeoutHelper(Long valueForTimeout, Assert.assertEquals( expectedTimeoutEnforcement, client.enforceAsyncAPITimeout()); } finally { - IOUtils.closeQuietly(client); + IOUtils.closeStream(client); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java index 2cf5962b99..f7512cf935 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java @@ -36,6 +36,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; @@ -59,7 +60,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -939,7 +939,7 @@ public void testGetRunningContainerLogs() throws Exception { logMessage(containerId1, "syslog"))); sysOutStream.reset(); } finally { - IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); fs.delete(new Path(rootLogDir), true); } } @@ -1477,19 +1477,13 @@ private String readContainerContent(Path containerPath, FileSystem fs) throws IOException { assertTrue(fs.exists(containerPath)); StringBuffer inputLine = new StringBuffer(); - BufferedReader reader = null; - try { - reader = new BufferedReader(new InputStreamReader( - fs.open(containerPath))); + try (BufferedReader reader = new BufferedReader(new InputStreamReader( + fs.open(containerPath)))) { String tmp; while ((tmp = reader.readLine()) != null) { inputLine.append(tmp); } return inputLine.toString(); - } finally { - if (reader != null) { - IOUtils.closeQuietly(reader); - } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index dd0279699e..ff48e74e00 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -26,7 +26,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configurable; @@ -35,6 +34,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.fs.RemoteIterator; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -407,7 +407,7 @@ private static void logDirNoAccessPermission(String remoteAppLogDir, public void closePrintStream(PrintStream out) { if (out != System.out) { - IOUtils.closeQuietly(out); + IOUtils.closeStream(out); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/IndexedFileAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/IndexedFileAggregatedLogsBlock.java index c49d3726be..8390f99dae 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/IndexedFileAggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/IndexedFileAggregatedLogsBlock.java @@ -28,12 +28,13 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.io.IOUtils; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.file.tfile.BoundedRangeFileInputStream; import org.apache.hadoop.io.file.tfile.Compression; @@ -232,7 +233,7 @@ private boolean readContainerLog(String compressAlgo, Block html, LOG.error("Error getting logs for " + logEntity, ex); continue; } finally { - IOUtils.closeQuietly(in); + IOUtils.closeStream(in); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java index 8bf54b5ddc..470594d853 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java @@ -806,7 +806,7 @@ private static void constructProcessSMAPInfo(ProcessTreeSmapMemInfo pInfo, } catch (Throwable t) { LOG.error(t.toString()); } finally { - IOUtils.closeQuietly(in); + org.apache.hadoop.io.IOUtils.closeStream(in); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java index 2da413d798..dfb19d4907 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java @@ -32,7 +32,6 @@ import java.util.List; import 
java.util.Map; import java.util.Set; -import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FSDataOutputStream; @@ -41,6 +40,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -267,7 +267,7 @@ public boolean isRollover(final FileContext fc, Charset.forName("UTF-8"))); fInput.writeLong(0); } finally { - IOUtils.closeQuietly(fInput); + IOUtils.closeStream(fInput); } meta = fileFormat.readAggregatedLogsMeta( logRequest); @@ -463,7 +463,7 @@ private File createAndWriteLocalLogFile(Path localLogDir, String logType, writer.close(); return file; } finally { - IOUtils.closeQuietly(writer); + IOUtils.closeStream(writer); } } @@ -578,7 +578,7 @@ public void testGetLogMetaFilesOfNode() throws Exception { Charset.forName("UTF-8"))); fInput.writeLong(0); } finally { - IOUtils.closeQuietly(fInput); + IOUtils.closeStream(fInput); } Path nodePath = LogAggregationUtils.getRemoteAppLogDir( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java index d485c55bc0..3cf9f65909 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java @@ -30,6 +30,7 @@ import java.util.Map.Entry; import java.util.Set; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord; import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecords; import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.ResourcePlugin; @@ -55,7 +56,6 @@ import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriInfo; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -518,7 +518,7 @@ public void write(OutputStream os) throws IOException, } } } finally { - IOUtils.closeQuietly(fis); + IOUtils.closeStream(fis); } } }; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java index bfe6ab5473..5095fbc7cc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java +++ 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java
index bfe6ab5473..5095fbc7cc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestResourceMappings.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.server.nodemanager.containermanager.container;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.yarn.server.nodemanager.api.deviceplugin.Device;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -103,15 +102,11 @@ public void testAssignedResourcesCanDeserializePreviouslySerializedValues() {
    * @throws IOException
    */
   private byte[] toBytes(List resources) throws IOException {
-    ObjectOutputStream oos = null;
     byte[] bytes;
-    try {
-      ByteArrayOutputStream bos = new ByteArrayOutputStream();
-      oos = new ObjectOutputStream(bos);
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
       oos.writeObject(resources);
       bytes = bos.toByteArray();
-    } finally {
-      IOUtils.closeQuietly(oos);
     }
     return bytes;
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/nodelabels/TestConfigurationNodeLabelsProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/nodelabels/TestConfigurationNodeLabelsProvider.java
index 2acd9b1b87..3fc6ba4425 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/nodelabels/TestConfigurationNodeLabelsProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/nodelabels/TestConfigurationNodeLabelsProvider.java
@@ -25,10 +25,10 @@
 import java.net.URL;
 import java.util.TimerTask;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.nodelabels.NodeLabelTestBase;
 import org.junit.After;
@@ -154,7 +154,7 @@ private static void modifyConf(String nodeLabels)
     conf.set(YarnConfiguration.NM_PROVIDER_CONFIGURED_NODE_PARTITION, nodeLabels);
     FileOutputStream confStream = new FileOutputStream(nodeLabelsConfigFile);
     conf.writeXml(confStream);
-    IOUtils.closeQuietly(confStream);
+    IOUtils.closeStream(confStream);
   }
 
   private static class XMLPathClassLoader extends ClassLoader {
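The TestResourceMappings hunk is the one place this part of the patch drops the close helper entirely: try-with-resources closes the ObjectOutputStream on every path, so no finally block is needed. A standalone sketch of the same refactoring, assuming the list's element type is Serializable and its runtime class (e.g. ArrayList) is itself serializable; SerializationSketch is an illustrative name:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.List;

public final class SerializationSketch {
  private SerializationSketch() {
  }

  // Same shape as the refactored toBytes(): the stream is closed by
  // try-with-resources, so neither a finally block nor a close helper
  // is required.
  public static byte[] toBytes(List<Serializable> resources)
      throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
      oos.writeObject(resources);
    }
    // Reading bos after the try block guarantees the object stream has
    // flushed its buffers; ByteArrayOutputStream itself needs no close.
    return bos.toByteArray();
  }
}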
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
index 6eecaa9fea..ea28656834 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
@@ -39,9 +39,9 @@
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.Marshaller;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.AuthenticationFilterInitializer;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -367,8 +367,8 @@ public Void call() throws Exception {
           }
         }
       } finally {
-        IOUtils.closeQuietly(reader);
-        IOUtils.closeQuietly(response);
+        IOUtils.closeStream(reader);
+        IOUtils.closeStream(response);
       }
       Assert.assertEquals("client2", owner);
       Token realToken = new Token();
@@ -431,10 +431,10 @@ public String call() throws Exception {
         setupConn(conn, "POST", MediaType.APPLICATION_JSON, body);
         InputStream response = conn.getInputStream();
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
-        BufferedReader reader = null;
-        try {
-          reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
-          for (String line; (line = reader.readLine()) != null;) {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
+            response, "UTF8"))) {
+          String line;
+          while ((line = reader.readLine()) != null) {
             JSONObject obj = new JSONObject(line);
             if (obj.has("token")) {
               reader.close();
@@ -444,8 +444,7 @@ public String call() throws Exception {
             }
           }
         } finally {
-          IOUtils.closeQuietly(reader);
-          IOUtils.closeQuietly(response);
+          IOUtils.closeStream(response);
         }
         return ret;
       }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
index bea81c79f8..57a812247f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -23,8 +23,8 @@
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
@@ -180,7 +180,7 @@ private static TimelineFilterList parseFilters(TimelineParser parser)
     try {
       return parser.parse();
     } finally {
-      IOUtils.closeQuietly(parser);
+      IOUtils.closeStream(parser);
    }
  }
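The final TimelineReaderWebServicesUtils hunk shows why closeStream fits return-in-try code: the result of parse() is returned from inside the try block, and because closeStream catches and logs close() failures rather than rethrowing, the cleanup cannot clobber either the return value or an exception already propagating out of parse(). A generic sketch of that pattern; ResultParser and parseAndClose are hypothetical stand-ins for TimelineParser, not Hadoop APIs:

import java.io.Closeable;
import java.io.IOException;

import org.apache.hadoop.io.IOUtils;

public final class ParseWithCleanup {
  private ParseWithCleanup() {
  }

  // Stand-in for TimelineParser: any Closeable that yields a result.
  public interface ResultParser<T> extends Closeable {
    T parse() throws IOException;
  }

  // Returns the parse result while guaranteeing the parser is closed
  // on every path.
  public static <T> T parseAndClose(ResultParser<T> parser)
      throws IOException {
    try {
      return parser.parse();
    } finally {
      // closeStream() logs rather than throws, so it cannot mask an
      // exception thrown by parse() or discard the returned value.
      IOUtils.closeStream(parser);
    }
  }
}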