diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7a78031f3e..ebc0e96187 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -629,6 +629,8 @@ Release 2.2.1 - UNRELEASED
     HDFS-5344. Make LsSnapshottableDir as Tool interface implementation.
     (Sathish via umamahesh)
 
+    HDFS-5544. Add a test case checking dfs.checksum.type set to NULL. (Sathish via umamahesh)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
index a3b3f808eb..88a83715ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
@@ -135,4 +135,29 @@ public void testFSOutputSummer() throws Exception {
       cluster.shutdown();
     }
   }
+
+  /**
+   * HDFS-5544: writing a file must succeed when dfs.checksum.type is
+   * configured as "NULL" (i.e. client-side checksums disabled).
+   */
+  @Test
+  public void testDFSCheckSumType() throws Exception {
+    Configuration conf = new HdfsConfiguration();
+    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
+    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, BYTES_PER_CHECKSUM);
+    conf.set(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, "NULL");
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(NUM_OF_DATANODES)
+        .build();
+    fileSys = cluster.getFileSystem();
+    try {
+      Path file = new Path("try.dat");
+      Random rand = new Random(seed);
+      rand.nextBytes(expected);
+      writeFile1(file);
+    } finally {
+      fileSys.close();
+      cluster.shutdown();
+    }
+  }
 }