From 7a3188d054481b9bd563e337901e93476303ce7f Mon Sep 17 00:00:00 2001
From: Giovanni Matteo Fumarola
Date: Thu, 2 May 2019 12:58:42 -0700
Subject: [PATCH] HADOOP-16282. Avoid FileStream to improve performance.
 Contributed by Ayush Saxena.

---
 .../org/apache/hadoop/conf/Configuration.java | 4 +--
 .../hadoop/crypto/random/OsSecureRandom.java | 9 ++---
 .../java/org/apache/hadoop/fs/FileUtil.java | 17 +++++----
 .../apache/hadoop/fs/shell/CopyCommands.java | 7 ++--
 .../apache/hadoop/metrics2/sink/FileSink.java | 6 ++--
 .../org/apache/hadoop/net/TableMapping.java | 6 ++--
 .../apache/hadoop/security/Credentials.java | 4 +--
 .../org/apache/hadoop/security/KDiag.java | 4 +--
 .../hadoop/security/LdapGroupsMapping.java | 5 +--
 .../hadoop/security/ShellBasedIdMapping.java | 4 +--
 .../alias/LocalJavaKeyStoreProvider.java | 6 ++--
 .../ssl/FileBasedKeyStoresFactory.java | 5 +--
 .../ssl/ReloadingX509TrustManager.java | 5 +--
 .../java/org/apache/hadoop/util/ConfTest.java | 4 +--
 .../apache/hadoop/util/FileBasedIPList.java | 4 +--
 .../apache/hadoop/util/HostsFileReader.java | 5 +--
 .../apache/hadoop/util/JsonSerialization.java | 4 +--
 .../java/org/apache/hadoop/util/RunJar.java | 8 ++---
 .../org/apache/hadoop/util/SysInfoLinux.java | 35 +++++++++++--------
 .../apache/hadoop/util/hash/JenkinsHash.java | 6 ++--
 .../hadoop/util/TestHostsFileReader.java | 10 +++---
 .../hdfs/util/CombinedHostsFileReader.java | 9 ++---
 .../hdfs/util/CombinedHostsFileWriter.java | 7 ++--
 .../server/HttpFSAuthenticationFilter.java | 7 ++--
 .../org/apache/hadoop/lib/server/Server.java | 4 +--
 .../hadoop/hdfs/qjournal/server/Journal.java | 3 +-
 .../server/aliasmap/InMemoryAliasMap.java | 5 +--
 .../fsdataset/impl/BlockPoolSlice.java | 3 +-
 .../fsdataset/impl/FsDatasetImpl.java | 3 +-
 .../fsdataset/impl/FsDatasetUtil.java | 3 +-
 .../server/diskbalancer/command/Command.java | 4 +--
 .../namenode/EditLogFileInputStream.java | 4 +--
 .../hdfs/server/namenode/FSImageFormat.java | 9 ++---
 ...ImagePreTransactionalStorageInspector.java | 5 +--
 .../hdfs/server/namenode/FSNamesystem.java | 5 +--
 .../apache/hadoop/hdfs/tools/DebugAdmin.java | 4 +--
 .../OfflineEditsVisitorFactory.java | 5 +--
 .../OfflineImageReconstructor.java | 12 +++----
 .../OfflineImageViewer.java | 6 ++--
 .../TextWriterImageVisitor.java | 6 ++--
 .../apache/hadoop/hdfs/util/MD5FileUtils.java | 8 ++---
 .../apache/hadoop/tools/RegexCopyFilter.java | 4 +--
 42 files changed, 148 insertions(+), 126 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 6251cd498e..c30ce0db84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -30,7 +30,6 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -43,6 +42,7 @@
 import java.net.JarURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -3075,7 +3075,7 @@ private XMLStreamReader2 getStreamReader(Resource wrapper, boolean quiet)
       LOG.debug("parsing File " + file);
     }
     reader = (XMLStreamReader2)parse(new BufferedInputStream(
-        new FileInputStream(file)),
((Path)resource).toString(), + Files.newInputStream(file.toPath())), ((Path) resource).toString(), isRestricted); } } else if (resource instanceof InputStream) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java index 66715916f5..8e191b5514 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java @@ -18,9 +18,10 @@ package org.apache.hadoop.crypto.random; import java.io.Closeable; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Random; import org.apache.hadoop.classification.InterfaceAudience; @@ -50,7 +51,7 @@ public class OsSecureRandom extends Random implements Closeable, Configurable { private String randomDevPath; - private transient FileInputStream stream; + private transient InputStream stream; private final byte[] reservoir = new byte[RESERVOIR_LENGTH]; @@ -60,7 +61,7 @@ private void fillReservoir(int min) { if (pos >= reservoir.length - min) { try { if (stream == null) { - stream = new FileInputStream(new File(randomDevPath)); + stream = Files.newInputStream(Paths.get(randomDevPath)); } IOUtils.readFully(stream, reservoir, 0, reservoir.length); } catch (IOException e) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index c849055a3a..4566686a12 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -22,9 +22,7 @@ import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -447,7 +445,7 @@ public static boolean copy(File src, InputStream in = null; OutputStream out =null; try { - in = new FileInputStream(src); + in = Files.newInputStream(src.toPath()); out = dstFS.create(dst); IOUtils.copyBytes(in, out, conf); } catch (IOException e) { @@ -495,7 +493,7 @@ private static boolean copy(FileSystem srcFS, FileStatus srcStatus, } } else { InputStream in = srcFS.open(src); - IOUtils.copyBytes(in, new FileOutputStream(dst), conf); + IOUtils.copyBytes(in, Files.newOutputStream(dst.toPath()), conf); } if (deleteSource) { return srcFS.delete(src, true); @@ -639,7 +637,7 @@ public static void unZip(InputStream inputStream, File toDir) throw new IOException("Mkdirs failed to create " + parent.getAbsolutePath()); } - try (OutputStream out = new FileOutputStream(file)) { + try (OutputStream out = Files.newOutputStream(file.toPath())) { IOUtils.copyBytes(zip, out, BUFFER_SIZE); } if (!file.setLastModified(entry.getTime())) { @@ -684,7 +682,7 @@ public static void unZip(File inFile, File unzipDir) throws IOException { file.getParentFile().toString()); } } - OutputStream out = new FileOutputStream(file); + OutputStream out = Files.newOutputStream(file.toPath()); try { byte[] buffer = new byte[8192]; int i; @@ -918,9 +916,10 @@ static void 
unTarUsingJava(File inFile, File untarDir, TarArchiveInputStream tis = null; try { if (gzipped) { - inputStream = new GZIPInputStream(new FileInputStream(inFile)); + inputStream = + new GZIPInputStream(Files.newInputStream(inFile.toPath())); } else { - inputStream = new FileInputStream(inFile); + inputStream = Files.newInputStream(inFile.toPath()); } inputStream = new BufferedInputStream(inputStream); @@ -1544,7 +1543,7 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd, // Write the manifest to output JAR file File classPathJar = File.createTempFile("classpath-", ".jar", workingDir); - try (FileOutputStream fos = new FileOutputStream(classPathJar); + try (OutputStream fos = Files.newOutputStream(classPathJar.toPath()); BufferedOutputStream bos = new BufferedOutputStream(fos)) { JarOutputStream jos = new JarOutputStream(bos, jarManifest); jos.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java index f59718f089..4622c75fbd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java @@ -18,10 +18,11 @@ package org.apache.hadoop.fs.shell; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; +import java.nio.file.Files; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -464,7 +465,7 @@ protected void processArguments(LinkedList args) dst.fs.create(dst.path, false).close(); } - FileInputStream is = null; + InputStream is = null; try (FSDataOutputStream fos = dst.fs.append(dst.path)) { if (readStdin) { if (args.size() == 0) { @@ -477,7 +478,7 @@ protected void processArguments(LinkedList args) // Read in each input file and write to the target. for (PathData source : args) { - is = new FileInputStream(source.toFile()); + is = Files.newInputStream(source.toFile().toPath()); IOUtils.copyBytes(is, fos, DEFAULT_IO_LENGTH); IOUtils.closeStream(is); is = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java index d67810490c..842ec630aa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java @@ -19,10 +19,10 @@ package org.apache.hadoop.metrics2.sink; import java.io.Closeable; -import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; +import java.nio.file.Files; +import java.nio.file.Paths; import org.apache.commons.configuration2.SubsetConfiguration; import org.apache.hadoop.classification.InterfaceAudience; @@ -47,7 +47,7 @@ public void init(SubsetConfiguration conf) { String filename = conf.getString(FILENAME_KEY); try { writer = filename == null ? 
System.out - : new PrintStream(new FileOutputStream(new File(filename)), + : new PrintStream(Files.newOutputStream(Paths.get(filename)), true, "UTF-8"); } catch (Exception e) { throw new MetricsException("Error creating "+ filename, e); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 45759df6ad..cd3514c4bc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -20,9 +20,10 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY; import java.io.BufferedReader; -import java.io.FileInputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -100,7 +101,8 @@ private Map load() { try (BufferedReader reader = new BufferedReader(new InputStreamReader( - new FileInputStream(filename), StandardCharsets.UTF_8))) { + Files.newInputStream(Paths.get(filename)), + StandardCharsets.UTF_8))) { String line = reader.readLine(); while (line != null) { line = line.trim(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index 4b0d889003..37cf021d41 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -26,9 +26,9 @@ import java.io.DataOutput; import java.io.DataOutputStream; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -243,7 +243,7 @@ public static Credentials readTokenStorageFile(File filename, Credentials credentials = new Credentials(); try { in = new DataInputStream(new BufferedInputStream( - new FileInputStream(filename))); + Files.newInputStream(filename.toPath()))); credentials.readTokenStorageStream(in); return credentials; } catch(IOException ioe) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java index 2c6de4d216..f759dbdb44 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java @@ -41,12 +41,12 @@ import java.io.Closeable; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; +import java.nio.file.Files; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Arrays; @@ -922,7 +922,7 @@ private void printEnv(String variable) { * @throws IOException IO problems */ private void dump(File file) throws IOException { - try (FileInputStream in = new FileInputStream(file)) { + try (InputStream in = 
Files.newInputStream(file.toPath())) { for (String line : IOUtils.readLines(in)) { println("%s", line); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java index 8b85c2ff32..761309d26c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.security; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Hashtable; @@ -836,7 +837,7 @@ String extractPassword(String pwFile) { StringBuilder password = new StringBuilder(); try (Reader reader = new InputStreamReader( - new FileInputStream(pwFile), StandardCharsets.UTF_8)) { + Files.newInputStream(Paths.get(pwFile)), StandardCharsets.UTF_8)) { int c = reader.read(); while (c > -1) { password.append((char)c); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java index 2ed96770a8..92ea83d8f1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java @@ -19,11 +19,11 @@ import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; @@ -583,7 +583,7 @@ static StaticMapping parseStaticMap(File staticMapFile) Map gidMapping = new HashMap(); BufferedReader in = new BufferedReader(new InputStreamReader( - new FileInputStream(staticMapFile), StandardCharsets.UTF_8)); + Files.newInputStream(staticMapFile.toPath()), StandardCharsets.UTF_8)); try { String line = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java index ce0eb7d2bf..c44e246b9d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java @@ -25,8 +25,6 @@ import org.apache.hadoop.util.Shell; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -69,7 +67,7 @@ protected OutputStream getOutputStreamForKeystore() throws IOException { if (LOG.isDebugEnabled()) { LOG.debug("using '" + file + "' for output stream."); } - FileOutputStream out = new FileOutputStream(file); + OutputStream out = Files.newOutputStream(file.toPath()); return out; } @@ -81,7 +79,7 @@ protected boolean keystoreExists() throws IOException { 
@Override protected InputStream getInputStreamForFile() throws IOException { - FileInputStream is = new FileInputStream(file); + InputStream is = Files.newInputStream(file.toPath()); return is; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index e5bdab3347..3531173bb7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -28,9 +28,10 @@ import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManager; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import java.security.GeneralSecurityException; import java.security.KeyStore; import java.text.MessageFormat; @@ -170,7 +171,7 @@ public void init(SSLFactory.Mode mode) LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation); } - InputStream is = new FileInputStream(keystoreLocation); + InputStream is = Files.newInputStream(Paths.get(keystoreLocation)); try { keystore.load(is, keystorePassword.toCharArray()); } finally { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java index 88b045e2f9..b2f0118aaf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java @@ -29,8 +29,9 @@ import javax.net.ssl.TrustManagerFactory; import javax.net.ssl.X509TrustManager; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; import java.security.GeneralSecurityException; import java.security.KeyStore; import java.security.cert.CertificateException; @@ -166,7 +167,7 @@ X509TrustManager loadTrustManager() throws IOException, GeneralSecurityException { X509TrustManager trustManager = null; KeyStore ks = KeyStore.getInstance(type); - FileInputStream in = new FileInputStream(file); + InputStream in = Files.newInputStream(file.toPath()); try { ks.load(in, (password == null) ? 
null : password.toCharArray()); lastLoaded = file.lastModified(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java index a2cb85f255..359b1c31ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfTest.java @@ -20,9 +20,9 @@ import java.io.File; import java.io.FileFilter; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -282,7 +282,7 @@ public static void main(String[] args) throws IOException { boolean ok = true; for (File file : files) { String path = file.getAbsolutePath(); - List errors = checkConf(new FileInputStream(file)); + List errors = checkConf(Files.newInputStream(file.toPath())); if (errors.isEmpty()) { System.out.println(path + ": valid"); } else { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java index 146f65c6cc..47aa9cc71a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java @@ -22,11 +22,11 @@ import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -90,7 +90,7 @@ private static String[] readLines(String fileName) throws IOException { if (file.exists()) { try ( Reader fileReader = new InputStreamReader( - new FileInputStream(file), StandardCharsets.UTF_8); + Files.newInputStream(file.toPath()), StandardCharsets.UTF_8); BufferedReader bufferedReader = new BufferedReader(fileReader)) { List lines = new ArrayList(); String line = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java index 67b0247729..6f41b8c750 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java @@ -20,6 +20,7 @@ import java.io.*; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.Collections; import java.util.Set; import java.util.HashMap; @@ -74,7 +75,7 @@ public HostsFileReader(String includesFile, InputStream inFileInputStream, public static void readFileToSet(String type, String filename, Set set) throws IOException { File file = new File(filename); - FileInputStream fis = new FileInputStream(file); + InputStream fis = Files.newInputStream(file.toPath()); readFileToSetWithFileInputStream(type, filename, fis, set); } @@ -120,7 +121,7 @@ public void refresh() throws IOException { public static void readFileToMap(String type, String filename, Map map) throws IOException { File file = new File(filename); - FileInputStream fis = new FileInputStream(file); + InputStream fis = 
Files.newInputStream(file.toPath()); readFileToMapWithFileInputStream(type, filename, fis, map); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java index 7e09a61ba9..e043b1dc38 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java @@ -21,10 +21,10 @@ import java.io.EOFException; import java.io.File; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.file.Files; import java.util.Map; import com.fasterxml.jackson.core.JsonParseException; @@ -190,7 +190,7 @@ public synchronized T load(File jsonFile) */ public void save(File file, T instance) throws IOException { - writeJsonAsBytes(instance, new FileOutputStream(file)); + writeJsonAsBytes(instance, Files.newOutputStream(file.toPath())); } /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java index 4c94dbc05b..50126002b7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java @@ -19,7 +19,6 @@ package org.apache.hadoop.util; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -28,6 +27,7 @@ import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; @@ -129,7 +129,7 @@ public static void unJar(InputStream inputStream, File toDir, + " would create file outside of " + toDir); } ensureDirectory(file.getParentFile()); - try (OutputStream out = new FileOutputStream(file)) { + try (OutputStream out = Files.newOutputStream(file.toPath())) { IOUtils.copyBytes(jar, out, BUFFER_SIZE); } if (!file.setLastModified(entry.getTime())) { @@ -166,7 +166,7 @@ public static void unJarAndSave(InputStream inputStream, File toDir, throws IOException{ File file = new File(toDir, name); ensureDirectory(toDir); - try (OutputStream jar = new FileOutputStream(file); + try (OutputStream jar = Files.newOutputStream(file.toPath()); TeeInputStream teeInputStream = new TeeInputStream(inputStream, jar)) { unJar(teeInputStream, toDir, unpackRegex); } @@ -200,7 +200,7 @@ public static void unJar(File jarFile, File toDir, Pattern unpackRegex) + " would create file outside of " + toDir); } ensureDirectory(file.getParentFile()); - try (OutputStream out = new FileOutputStream(file)) { + try (OutputStream out = Files.newOutputStream(file.toPath())) { IOUtils.copyBytes(in, out, BUFFER_SIZE); } if (!file.setLastModified(entry.getTime())) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java index 2c2aca3a6b..3591bb81e1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java @@ 
-19,12 +19,12 @@ package org.apache.hadoop.util; import java.io.BufferedReader; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.InputStreamReader; import java.io.IOException; import java.math.BigInteger; import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.HashMap; import java.util.HashSet; import java.util.regex.Matcher; @@ -246,9 +246,10 @@ private void readProcMemInfoFile(boolean readAgain) { InputStreamReader fReader; try { fReader = new InputStreamReader( - new FileInputStream(procfsMemFile), Charset.forName("UTF-8")); + Files.newInputStream(Paths.get(procfsMemFile)), + Charset.forName("UTF-8")); in = new BufferedReader(fReader); - } catch (FileNotFoundException f) { + } catch (IOException f) { // shouldn't happen.... LOG.warn("Couldn't read " + procfsMemFile + "; can't determine memory settings"); @@ -316,10 +317,11 @@ private void readProcCpuInfoFile() { BufferedReader in; InputStreamReader fReader; try { - fReader = new InputStreamReader( - new FileInputStream(procfsCpuFile), Charset.forName("UTF-8")); + fReader = + new InputStreamReader(Files.newInputStream(Paths.get(procfsCpuFile)), + Charset.forName("UTF-8")); in = new BufferedReader(fReader); - } catch (FileNotFoundException f) { + } catch (IOException f) { // shouldn't happen.... LOG.warn("Couldn't read " + procfsCpuFile + "; can't determine cpu info"); return; @@ -377,9 +379,10 @@ private void readProcStatFile() { InputStreamReader fReader; try { fReader = new InputStreamReader( - new FileInputStream(procfsStatFile), Charset.forName("UTF-8")); + Files.newInputStream(Paths.get(procfsStatFile)), + Charset.forName("UTF-8")); in = new BufferedReader(fReader); - } catch (FileNotFoundException f) { + } catch (IOException f) { // shouldn't happen.... 
return; } @@ -431,9 +434,10 @@ private void readProcNetInfoFile() { InputStreamReader fReader; try { fReader = new InputStreamReader( - new FileInputStream(procfsNetFile), Charset.forName("UTF-8")); + Files.newInputStream(Paths.get(procfsNetFile)), + Charset.forName("UTF-8")); in = new BufferedReader(fReader); - } catch (FileNotFoundException f) { + } catch (IOException f) { return; } @@ -485,8 +489,9 @@ private void readProcDisksInfoFile() { BufferedReader in; try { in = new BufferedReader(new InputStreamReader( - new FileInputStream(procfsDisksFile), Charset.forName("UTF-8"))); - } catch (FileNotFoundException f) { + Files.newInputStream(Paths.get(procfsDisksFile)), + Charset.forName("UTF-8"))); + } catch (IOException f) { return; } @@ -552,9 +557,9 @@ int readDiskBlockInformation(String diskName, int defSector) { BufferedReader in; try { in = new BufferedReader(new InputStreamReader( - new FileInputStream(procfsDiskSectorFile), + Files.newInputStream(Paths.get(procfsDiskSectorFile)), Charset.forName("UTF-8"))); - } catch (FileNotFoundException f) { + } catch (IOException f) { return defSector; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java index 8c3b9da4ae..3f62aef00a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/hash/JenkinsHash.java @@ -18,8 +18,10 @@ package org.apache.hadoop.util.hash; -import java.io.FileInputStream; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -252,7 +254,7 @@ public static void main(String[] args) throws IOException { System.err.println("Usage: JenkinsHash filename"); System.exit(-1); } - try (FileInputStream in = new FileInputStream(args[0])) { + try (InputStream in = Files.newInputStream(Paths.get(args[0]))) { byte[] bytes = new byte[512]; int value = 0; JenkinsHash hash = new JenkinsHash(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java index 24621145b7..fd9966feb0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestHostsFileReader.java @@ -18,8 +18,8 @@ package org.apache.hadoop.util; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileWriter; +import java.nio.file.NoSuchFileException; import java.util.Map; import org.apache.hadoop.test.GenericTestUtils; @@ -135,8 +135,8 @@ public void testCreateHostFileReaderWithNonexistentFile() throws Exception { new HostsFileReader( HOSTS_TEST_DIR + "/doesnt-exist", HOSTS_TEST_DIR + "/doesnt-exist"); - Assert.fail("Should throw FileNotFoundException"); - } catch (FileNotFoundException ex) { + Assert.fail("Should throw NoSuchFileException"); + } catch (NoSuchFileException ex) { // Exception as expected } } @@ -157,8 +157,8 @@ public void testRefreshHostFileReaderWithNonexistentFile() throws Exception { assertTrue(INCLUDES_FILE.delete()); try { hfp.refresh(); - Assert.fail("Should throw FileNotFoundException"); - 
} catch (FileNotFoundException ex) { + Assert.fail("Should throw NoSuchFileException"); + } catch (NoSuchFileException ex) { // Exception as expected } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java index be1f6d0d5a..91ab48fa9d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java @@ -24,10 +24,11 @@ import com.fasterxml.jackson.databind.ObjectReader; import java.io.File; -import java.io.FileInputStream; import java.io.InputStreamReader; import java.io.IOException; import java.io.Reader; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -82,8 +83,8 @@ private CombinedHostsFileReader() { if (hostFile.length() > 0) { try (Reader input = - new InputStreamReader(new FileInputStream(hostFile), - "UTF-8")) { + new InputStreamReader( + Files.newInputStream(hostFile.toPath()), "UTF-8")) { allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class); } catch (JsonMappingException jme) { // The old format doesn't have json top-level token to enclose @@ -101,7 +102,7 @@ private CombinedHostsFileReader() { JsonFactory jsonFactory = new JsonFactory(); List all = new ArrayList<>(); try (Reader input = - new InputStreamReader(new FileInputStream(hostsFilePath), + new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)), "UTF-8")) { Iterator iterator = objectReader.readValues(jsonFactory.createParser(input)); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java index 2d9bbf5044..7897dc1ebf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hdfs.util; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; - +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Set; import com.fasterxml.jackson.databind.ObjectMapper; @@ -61,7 +61,8 @@ public static void writeFile(final String hostsFile, final ObjectMapper objectMapper = new ObjectMapper(); try (Writer output = - new OutputStreamWriter(new FileOutputStream(hostsFile), "UTF-8")) { + new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)), + "UTF-8")) { objectMapper.writeValue(output, allDNs); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java index a4e4385668..362b1b45a6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSAuthenticationFilter.java @@ -27,11 +27,12 @@ import javax.servlet.FilterConfig; 
import javax.servlet.ServletException; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.Map; import java.util.Properties; @@ -82,8 +83,8 @@ protected Properties getConfiguration(String configPrefix, try { StringBuilder secret = new StringBuilder(); - Reader reader = new InputStreamReader(new FileInputStream( - signatureSecretFile), StandardCharsets.UTF_8); + Reader reader = new InputStreamReader(Files.newInputStream(Paths.get( + signatureSecretFile)), StandardCharsets.UTF_8); int c = reader.read(); while (c > -1) { secret.append((char)c); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java index 57f651a73b..f8b933b97f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java @@ -30,9 +30,9 @@ import org.slf4j.LoggerFactory; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.file.Files; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; @@ -470,7 +470,7 @@ protected void initConfig() throws ServerException { } try { log.debug("Loading site configuration from [{}]", siteFile); - inputStream = new FileInputStream(siteFile); + inputStream = Files.newInputStream(siteFile.toPath()); siteConf = new Configuration(false); ConfigurationUtils.load(siteConf, inputStream); } catch (IOException ex) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java index f0bdab1c00..aa0099efb8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java @@ -20,7 +20,6 @@ import com.google.protobuf.ByteString; import java.io.Closeable; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; @@ -1058,7 +1057,7 @@ private PersistedRecoveryPaxosData getPersistedPaxosData(long segmentTxId) return null; } - InputStream in = new FileInputStream(f); + InputStream in = Files.newInputStream(f.toPath()); try { PersistedRecoveryPaxosData ret = PersistedRecoveryPaxosData.parseDelimitedFrom(in); Preconditions.checkState(ret != null && diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java index eeee2a3c6b..8df27cd77e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryAliasMap.java @@ -53,9 +53,9 @@ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.IOException; import java.net.URI; +import java.nio.file.Files; import 
java.util.ArrayList; import java.util.Map; import java.util.Optional; @@ -324,7 +324,8 @@ private static File getCompressedAliasMap(File aliasMapDir) GzipCompressorOutputStream gzOut = null; TarArchiveOutputStream tOut = null; try { - bOut = new BufferedOutputStream(new FileOutputStream(outCompressedFile)); + bOut = new BufferedOutputStream( + Files.newOutputStream(outCompressedFile.toPath())); gzOut = new GzipCompressorOutputStream(bOut); tOut = new TarArchiveOutputStream(gzOut); addFileToTarGzRecursively(tOut, aliasMapDir, "", new Configuration()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java index 4a4fef9d1f..6529409df1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java @@ -28,6 +28,7 @@ import java.io.OutputStreamWriter; import java.io.RandomAccessFile; import java.io.Writer; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -311,7 +312,7 @@ void saveDfsUsed() { try { long used = getDfsUsed(); try (Writer out = new OutputStreamWriter( - new FileOutputStream(outFile), "UTF-8")) { + Files.newOutputStream(outFile.toPath()), "UTF-8")) { // mtime is written last, so that truncated writes won't be valid. out.write(Long.toString(used) + " " + Long.toString(timer.now())); // This is only called as part of the volume shutdown. diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java index 9cfdbc3253..47f0c0111f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java @@ -28,6 +28,7 @@ import java.io.InputStream; import java.nio.channels.ClosedChannelException; import java.nio.channels.FileChannel; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -1149,7 +1150,7 @@ static void computeChecksum(ReplicaInfo srcReplica, File dstMeta, } } metaOut = new DataOutputStream(new BufferedOutputStream( - new FileOutputStream(dstMeta), smallBufferSize)); + Files.newOutputStream(dstMeta.toPath()), smallBufferSize)); BlockMetadataHeader.writeHeader(metaOut, checksum); int offset = 0; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetUtil.java index 92c088860d..5308b60b59 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetUtil.java @@ -21,7 +21,6 @@ import java.io.DataOutputStream; import java.io.File; import java.io.FileDescriptor; -import java.io.FileInputStream; import java.io.FilenameFilter; import 
java.io.IOException; import java.io.InputStream; @@ -193,7 +192,7 @@ public URI getMetadataURI() { @Override public InputStream getDataInputStream(long seekOffset) throws IOException { - return new FileInputStream(blockFile); + return Files.newInputStream(blockFile.toPath()); } }; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java index eddef33690..4f8e373d9c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java @@ -54,12 +54,12 @@ import org.slf4j.LoggerFactory; import java.io.Closeable; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintStream; import java.net.InetSocketAddress; import java.net.URI; import java.net.URL; +import java.nio.file.NoSuchFileException; import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.Collections; @@ -274,7 +274,7 @@ protected Set getNodeList(String listArg) throws IOException { try { HostsFileReader.readFileToSet("include", Paths.get(listURL.getPath()).toString(), resultSet); - } catch (FileNotFoundException e) { + } catch (NoSuchFileException e) { String warnMsg = String .format("The input host file path '%s' is not a valid path. " + "Please make sure the host file exists.", listArg); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java index 7dd3d549fb..8bbdd42a6f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java @@ -23,11 +23,11 @@ import java.io.DataInputStream; import java.io.EOFException; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URL; +import java.nio.file.Files; import java.security.PrivilegedExceptionAction; import org.slf4j.Logger; @@ -430,7 +430,7 @@ public FileLog(File file) { @Override public InputStream getInputStream() throws IOException { - return new FileInputStream(file); + return Files.newInputStream(file.toPath()); } @Override diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java index ae2a037146..da067d15b6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java @@ -23,9 +23,10 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; -import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; import java.security.DigestInputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; @@ -215,9 +216,9 @@ public void load(File file, boolean 
requireSameLayoutVersion) throws IOException { Preconditions.checkState(impl == null, "Image already loaded!"); - FileInputStream is = null; + InputStream is = null; try { - is = new FileInputStream(file); + is = Files.newInputStream(file.toPath()); byte[] magic = new byte[FSImageUtil.MAGIC_HEADER.length]; IOUtils.readFully(is, magic, 0, magic.length); if (Arrays.equals(magic, FSImageUtil.MAGIC_HEADER)) { @@ -318,7 +319,7 @@ public void load(File curFile) throws IOException { // MessageDigest digester = MD5Hash.getDigester(); DigestInputStream fin = new DigestInputStream( - new FileInputStream(curFile), digester); + Files.newInputStream(curFile.toPath()), digester); DataInputStream in = new DataInputStream(fin); try { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector.java index 1d97ace752..0f8515fd72 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImagePreTransactionalStorageInspector.java @@ -20,8 +20,8 @@ import java.io.DataInputStream; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -130,7 +130,8 @@ static long readCheckpointTime(StorageDirectory sd) throws IOException { File timeFile = NNStorage.getStorageFile(sd, NameNodeFile.TIME); long timeStamp = 0L; if (timeFile.exists() && FileUtil.canRead(timeFile)) { - DataInputStream in = new DataInputStream(new FileInputStream(timeFile)); + DataInputStream in = new DataInputStream( + Files.newInputStream(timeFile.toPath())); try { timeStamp = in.readLong(); in.close(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 938971951f..5c9341f82b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java @@ -117,7 +117,6 @@ import java.io.DataOutputStream; import java.io.File; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; @@ -126,6 +125,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URI; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -1774,7 +1774,8 @@ void metaSave(String filename) throws IOException { checkOperation(OperationCategory.READ); File file = new File(System.getProperty("hadoop.log.dir"), filename); PrintWriter out = new PrintWriter(new BufferedWriter( - new OutputStreamWriter(new FileOutputStream(file), Charsets.UTF_8))); + new OutputStreamWriter(Files.newOutputStream(file.toPath()), + Charsets.UTF_8))); metaSave(out); out.flush(); out.close(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DebugAdmin.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DebugAdmin.java index 0921f6adcd..9a654e580c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DebugAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DebugAdmin.java @@ -21,7 +21,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; -import java.io.FileOutputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; @@ -29,6 +28,7 @@ import java.net.URISyntaxException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; +import java.nio.file.Files; import java.util.LinkedList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -276,7 +276,7 @@ int run(List args) throws IOException { final int smallBufferSize = DFSUtilClient.getSmallBufferSize(conf); metaOut = new DataOutputStream( - new BufferedOutputStream(new FileOutputStream(srcMeta), + new BufferedOutputStream(Files.newOutputStream(srcMeta.toPath()), smallBufferSize)); BlockMetadataHeader.writeHeader(metaOut, checksum); metaOut.close(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java index de3acebd92..5fccbf1ab8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hdfs.tools.offlineEditsViewer; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -48,7 +49,7 @@ static public OfflineEditsVisitor getEditsVisitor(String filename, return new BinaryEditsVisitor(filename); } OfflineEditsVisitor vis; - OutputStream fout = new FileOutputStream(filename); + OutputStream fout = Files.newOutputStream(Paths.get(filename)); OutputStream out = null; try { if (!printToScreen) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java index a97bb72fa7..89f6dcc6ce 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java @@ -31,10 +31,10 @@ import java.io.BufferedOutputStream; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; +import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.nio.file.Files; @@ -1821,16 +1821,16 @@ private void processXml() throws Exception { public static void run(String inputPath, String outputPath) throws Exception { MessageDigest digester = MD5Hash.getDigester(); - FileOutputStream fout = 
null; + OutputStream fout = null; File foutHash = new File(outputPath + ".md5"); Files.deleteIfExists(foutHash.toPath()); // delete any .md5 file that exists CountingOutputStream out = null; - FileInputStream fis = null; + InputStream fis = null; InputStreamReader reader = null; try { Files.deleteIfExists(Paths.get(outputPath)); - fout = new FileOutputStream(outputPath); - fis = new FileInputStream(inputPath); + fout = Files.newOutputStream(Paths.get(outputPath)); + fis = Files.newInputStream(Paths.get(inputPath)); reader = new InputStreamReader(fis, Charset.forName("UTF-8")); out = new CountingOutputStream( new DigestOutputStream( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java index dd50ab003c..0f544a35ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java @@ -20,9 +20,9 @@ import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.EOFException; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -126,7 +126,7 @@ public void go() throws IOException { boolean done = false; try { tracker = new PositionTrackingInputStream(new BufferedInputStream( - new FileInputStream(new File(inputFile)))); + Files.newInputStream(Paths.get(inputFile)))); in = new DataInputStream(tracker); int imageVersionFile = findImageVersion(in); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java index 972701b60c..7f837fd32d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hdfs.tools.offlineImageViewer; -import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; +import java.nio.file.Files; +import java.nio.file.Paths; import com.google.common.base.Charsets; @@ -59,7 +60,8 @@ public TextWriterImageVisitor(String filename, boolean printToScreen) throws IOException { super(); this.printToScreen = printToScreen; - fw = new OutputStreamWriter(new FileOutputStream(filename), Charsets.UTF_8); + fw = new OutputStreamWriter(Files.newOutputStream(Paths.get(filename)), + Charsets.UTF_8); okToWrite = true; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java index 95dcf7181c..b541b14f64 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java @@ -19,11 +19,11 @@ import java.io.BufferedReader; import java.io.File; -import 
java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.Files; import java.security.DigestInputStream; import java.security.MessageDigest; import java.util.regex.Matcher; @@ -74,8 +74,8 @@ public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5) */ private static Matcher readStoredMd5(File md5File) throws IOException { BufferedReader reader = - new BufferedReader(new InputStreamReader(new FileInputStream( - md5File), Charsets.UTF_8)); + new BufferedReader(new InputStreamReader( + Files.newInputStream(md5File.toPath()), Charsets.UTF_8)); String md5Line; try { md5Line = reader.readLine(); @@ -125,7 +125,7 @@ public static MD5Hash readStoredMd5ForFile(File dataFile) throws IOException { * Read dataFile and compute its MD5 checksum. */ public static MD5Hash computeMd5ForFile(File dataFile) throws IOException { - InputStream in = new FileInputStream(dataFile); + InputStream in = Files.newInputStream(dataFile.toPath()); try { MessageDigest digester = MD5Hash.getDigester(); DigestInputStream dis = new DigestInputStream(in, digester); diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java index d91b28253d..2f4ea69e8f 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java @@ -25,12 +25,12 @@ import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.Charset; +import java.nio.file.Files; import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; @@ -63,7 +63,7 @@ protected RegexCopyFilter(String filtersFilename) { public void initialize() { BufferedReader reader = null; try { - InputStream is = new FileInputStream(filtersFile); + InputStream is = Files.newInputStream(filtersFile.toPath()); reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8"))); String line;
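
[Editorial note, not part of the patch] Every hunk above applies the same substitution: direct construction of java.io.FileInputStream / java.io.FileOutputStream is replaced by the java.nio.file.Files factory methods newInputStream and newOutputStream, reached through file.toPath() or Paths.get(...). A commonly cited reason this helps performance is that the old stream classes carried finalizers in older JDKs, adding garbage-collection overhead, whereas the NIO channel-backed streams do not; the patch subject only says "Avoid FileStream to improve performance", so treat that rationale as background rather than the JIRA's exact wording. One behavioral difference, visible in the TestHostsFileReader and Command.java hunks, is that a missing file now surfaces as java.nio.file.NoSuchFileException instead of java.io.FileNotFoundException. The sketch below is illustrative only; the class name StreamMigrationSketch and its methods are invented for the example and do not appear in the patch.

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Illustrative sketch of the migration pattern applied throughout this patch:
 * java.io.FileInputStream / FileOutputStream replaced with the NIO factory
 * methods Files.newInputStream / Files.newOutputStream.
 */
public class StreamMigrationSketch {

  /** Old style: constructs a FileInputStream directly. */
  static long countBytesOldStyle(String name) throws IOException {
    try (InputStream in = new java.io.FileInputStream(name)) {
      return drain(in);
    }
  }

  /** New style, as used in the patch: obtain the stream from java.nio.file.Files. */
  static long countBytesNewStyle(String name) throws IOException {
    Path path = Paths.get(name); // or file.toPath() when starting from a java.io.File
    try (InputStream in = Files.newInputStream(path)) {
      return drain(in);
    }
  }

  /** Copy helper using the NIO output-stream factory. */
  static void copy(Path src, Path dst) throws IOException {
    try (InputStream in = Files.newInputStream(src);
         OutputStream out = Files.newOutputStream(dst)) {
      byte[] buffer = new byte[8192];
      int read;
      while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
      }
    }
  }

  private static long drain(InputStream in) throws IOException {
    long total = 0;
    byte[] buffer = new byte[8192];
    int read;
    while ((read = in.read(buffer)) != -1) {
      total += read;
    }
    return total;
  }

  public static void main(String[] args) throws IOException {
    // Behavioral difference the patch accounts for: a missing file now surfaces
    // as NoSuchFileException (an IOException, but not a FileNotFoundException),
    // hence the updated catch blocks and test expectations in the hunks above.
    try {
      countBytesNewStyle("does-not-exist.txt");
    } catch (NoSuchFileException expected) {
      System.out.println("missing file reported as "
          + expected.getClass().getSimpleName());
    }
  }
}

The patch itself applies the same idiom inline, for example Files.newInputStream(file.toPath()) in Configuration.getStreamReader and Files.newOutputStream(dstMeta.toPath()) in FsDatasetImpl.computeChecksum.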