diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index dc9197bbeb..8ad9d6560f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -419,6 +419,15 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       "dfs.namenode.snapshot.max.limit";
   public static final int DFS_NAMENODE_SNAPSHOT_MAX_LIMIT_DEFAULT = 65536;
+  public static final String DFS_NAMENODE_SNAPSHOT_SKIPLIST_SKIP_INTERVAL =
+      "dfs.namenode.snapshot.skiplist.interval";
+  public static final int DFS_NAMENODE_SNAPSHOT_SKIPLIST_SKIP_INTERVAL_DEFAULT =
+      10;
+  public static final String DFS_NAMENODE_SNAPSHOT_SKIPLIST_MAX_LEVELS =
+      "dfs.namenode.snapshot.skiplist.max.levels";
+  public static final int
+      DFS_NAMENODE_SNAPSHOT_SKIPLIST_MAX_SKIP_LEVELS_DEFAULT = 0;
+
   // Whether to enable datanode's stale state detection and usage for reads
   public static final String DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY =
       "dfs.namenode.avoid.read.stale.datanode";
   public static final boolean DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_DEFAULT = false;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
index 8f2465a7ad..5163e5ece5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
@@ -138,10 +138,14 @@ public final D getLast() {
     return n == 0 ? null : diffs.get(n - 1);
   }
 
+  DiffList<D> newDiffs() {
+    return new DiffListByArrayList<>(
+        INodeDirectory.DEFAULT_FILES_PER_DIRECTORY);
+  }
+
   private void createDiffsIfNeeded() {
     if (diffs == null) {
-      diffs =
-          new DiffListByArrayList<>(INodeDirectory.DEFAULT_FILES_PER_DIRECTORY);
+      diffs = newDiffs();
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DiffListBySkipList.java
similarity index 98%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
rename to hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DiffListBySkipList.java
index 58930e27d8..0d24a32a00 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DiffListBySkipList.java
@@ -68,9 +68,9 @@
  *
  * Once a snapshot gets deleted, the list needs to be balanced.
  */
-public class DirectoryDiffList implements DiffList<DirectoryDiff> {
+public class DiffListBySkipList implements DiffList<DirectoryDiff> {
   public static final Logger LOG =
-      LoggerFactory.getLogger(DirectoryDiffList.class);
+      LoggerFactory.getLogger(DiffListBySkipList.class);
 
   private static class SkipDiff {
     /**
@@ -240,7 +240,7 @@ public String toString() {
   /**
    * Constructs a new, empty instance of SkipList.
    */
-  public DirectoryDiffList(int capacity, int interval, int skipLevel) {
+  public DiffListBySkipList(int capacity, int interval, int skipLevel) {
     skipNodeList = new ArrayList<>(capacity);
     head = new SkipListNode(null, 0);
     this.maxSkipLevels = skipLevel;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffListFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffListFactory.java
new file mode 100644
index 0000000000..e77d46862b
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffListFactory.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.snapshot;
+
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
+
+import java.util.function.IntFunction;
+
+/** For creating {@link DiffList} for {@link DirectoryDiff}. */
+public abstract class DirectoryDiffListFactory {
+  public static DiffList<DirectoryDiff> createDiffList(int capacity) {
+    return constructor.apply(capacity);
+  }
+
+  public static void init(int skipInterval, int maxLevels, Log log) {
+    if (maxLevels > 0) {
+      constructor = c -> new DiffListBySkipList(c, skipInterval, maxLevels);
+      log.info("SkipList is enabled with skipInterval=" + skipInterval
+          + ", maxLevels=" + maxLevels);
+    } else {
+      constructor = c -> new DiffListByArrayList<>(c);
+      log.info("SkipList is disabled");
+    }
+  }
+
+  private static volatile IntFunction<DiffList<DirectoryDiff>> constructor
+      = c -> new DiffListByArrayList<>(c);
+
+  private DirectoryDiffListFactory() {}
+}
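Illustrative aside (not part of the patch): once DirectoryDiffListFactory.init(...) has run, callers simply ask the factory for a list and never see which implementation backs it. A minimal sketch, with example values for skipInterval and maxLevels:

    // Sketch only: initialize the factory, then create a diff list through it.
    // With maxLevels > 0 the factory hands out DiffListBySkipList instances;
    // with maxLevels == 0 it falls back to DiffListByArrayList.
    DirectoryDiffListFactory.init(10, 3, LogFactory.getLog("snapshot"));
    DiffList<DirectoryDiff> diffs = DirectoryDiffListFactory.createDiffList(
        INodeDirectory.DEFAULT_FILES_PER_DIRECTORY);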
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
index eecbe11c7b..b3ce602463 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
@@ -334,6 +334,12 @@ INodeDirectoryAttributes createSnapshotCopy(INodeDirectory currentDir) {
         : new INodeDirectoryAttributes.SnapshotCopy(currentDir);
   }
 
+  @Override
+  DiffList<DirectoryDiff> newDiffs() {
+    return DirectoryDiffListFactory
+        .createDiffList(INodeDirectory.DEFAULT_FILES_PER_DIRECTORY);
+  }
+
   /** Replace the given child in the created/deleted list, if there is any. */
   public boolean replaceChild(final ListType type, final INode oldChild,
       final INode newChild) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotManager.java
index 7cb45dcc90..038ad3c5a2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotManager.java
@@ -127,6 +127,14 @@ public SnapshotManager(final Configuration conf, final FSDirectory fsdir) {
         + snapshotDiffAllowSnapRootDescendant
         + ", maxSnapshotLimit: "
         + maxSnapshotLimit);
+
+    final int maxLevels = conf.getInt(
+        DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_MAX_LEVELS,
+        DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_MAX_SKIP_LEVELS_DEFAULT);
+    final int skipInterval = conf.getInt(
+        DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_SKIP_INTERVAL,
+        DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_SKIP_INTERVAL_DEFAULT);
+    DirectoryDiffListFactory.init(skipInterval, maxLevels, LOG);
   }
 
   @VisibleForTesting
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index b2da5a0897..2d3c5e7baf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -4362,7 +4362,6 @@
     across to the client within one rpc call.
   </description>
 </property>
-
 <property>
   <name>dfs.namenode.snapshot.max.limit</name>
   <value>65536</value>
@@ -4373,6 +4372,24 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.namenode.snapshot.skiplist.max.levels</name>
+  <value>0</value>
+  <description>
+    Maximum no of the skip levels to be maintained in the skip list for
+    storing directory snapshot diffs. By default, it is set to 0 and a linear
+    list will be used to store the directory snapshot diffs.
+  </description>
+</property>
+
+<property>
+  <name>dfs.namenode.snapshot.skiplist.interval</name>
+  <value>10</value>
+  <description>
+    The interval after which the skip levels will be formed in the skip list
+    for storing directory snapshot diffs. By default, value is set to 10.
+  </description>
+</property>
+
 <property>
   <name>dfs.pipeline.ecn</name>
   <value>false</value>
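Usage note (illustrative, not part of the patch): with the defaults above the skip list stays disabled. An operator opts in by setting dfs.namenode.snapshot.skiplist.max.levels to a positive value, either in hdfs-site.xml or programmatically; the values below are examples only:

    // Sketch only: enable the skip-list representation for directory diffs.
    Configuration conf = new HdfsConfiguration();
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_MAX_LEVELS, 3);   // > 0 enables it
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIPLIST_SKIP_INTERVAL, 10);
    // SnapshotManager reads both keys in its constructor and then calls
    // DirectoryDiffListFactory.init(skipInterval, maxLevels, LOG).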
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDiffListBySkipList.java
similarity index 92%
rename from hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
rename to hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDiffListBySkipList.java
index e493e4b4af..8062951a4f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDiffListBySkipList.java
@@ -41,7 +41,7 @@
 /**
  * This class tests the DirectoryDiffList API's.
  */
-public class TestDirectoryDiffList{
+public class TestDiffListBySkipList {
   static final int NUM_SNAPSHOTS = 100;
   static {
     SnapshotTestHelper.disableLogs();
@@ -78,7 +78,7 @@ static void assertList(List<INode> expected, List<INode> computed) {
     }
   }
 
-  static void verifyChildrenList(DirectoryDiffList skip, INodeDirectory dir) {
+  static void verifyChildrenList(DiffListBySkipList skip, INodeDirectory dir) {
     final int n = skip.size();
     for (int i = 0; i < skip.size(); i++) {
       final List<INode> expected = ReadOnlyList.Util.asList(
@@ -95,7 +95,7 @@ static void verifyChildrenList(DirectoryDiffList skip, INodeDirectory dir) {
   }
 
   static void verifyChildrenList(
-      DiffList<DirectoryDiff> array, DirectoryDiffList skip,
+      DiffList<DirectoryDiff> array, DiffListBySkipList skip,
       INodeDirectory dir, List<INode> childrenList) {
     final int n = array.size();
     Assert.assertEquals(n, skip.size());
@@ -144,13 +144,13 @@ public void testAddLast() throws Exception {
 
   static void testAddLast(int n) throws Exception {
     final Path root = new Path("/testAddLast" + n);
-    DirectoryDiffList.LOG.info("run " + root);
+    DiffListBySkipList.LOG.info("run " + root);
 
-    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffListBySkipList skipList = new DiffListBySkipList(0, 3, 5);
     final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
     INodeDirectory dir = addDiff(n, skipList, arrayList, root);
     // verify that the both the children list obtained from hdfs and
-    // DirectoryDiffList are same
+    // DiffListBySkipList are same
     verifyChildrenList(skipList, dir);
     verifyChildrenList(arrayList, skipList, dir, Collections.emptyList());
   }
@@ -163,7 +163,7 @@ public void testAddFirst() throws Exception {
 
   static void testAddFirst(int n) throws Exception {
     final Path root = new Path("/testAddFirst" + n);
-    DirectoryDiffList.LOG.info("run " + root);
+    DiffListBySkipList.LOG.info("run " + root);
 
     hdfs.mkdirs(root);
     for (int i = 1; i < n; i++) {
@@ -180,7 +180,7 @@ static void testAddFirst(int n) throws Exception {
     DiffList<DirectoryDiff> diffs = dir.getDiffs().asList();
     List<INode> childrenList = ReadOnlyList.Util.asList(dir.getChildrenList(
         diffs.get(0).getSnapshotId()));
-    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffListBySkipList skipList = new DiffListBySkipList(0, 3, 5);
     final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
     for (int i = diffs.size() - 1; i >= 0; i--) {
       final DirectoryDiff d = diffs.get(i);
@@ -188,7 +188,7 @@ static void testAddFirst(int n) throws Exception {
       arrayList.addFirst(d);
     }
     // verify that the both the children list obtained from hdfs and
-    // DirectoryDiffList are same
+    // DiffListBySkipList are same
     verifyChildrenList(skipList, dir);
     verifyChildrenList(arrayList, skipList, dir, childrenList);
   }
@@ -231,9 +231,9 @@ public void testRemoveRandom() throws Exception {
   static void testRemove(String name, int n, IntFunction<Integer> indexFunction)
       throws Exception {
     final Path root = new Path("/testRemove" + name + n);
-    DirectoryDiffList.LOG.info("run " + root);
+    DiffListBySkipList.LOG.info("run " + root);
 
-    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffListBySkipList skipList = new DiffListBySkipList(0, 3, 5);
     final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
     final INodeDirectory dir = addDiff(n, skipList, arrayList, root);
     Assert.assertEquals(n, arrayList.size());
@@ -248,9 +248,9 @@ static void testRemove(String name, int n, IntFunction<Integer> indexFunction)
     }
   }
 
-  static DirectoryDiff remove(int i, DirectoryDiffList skip,
+  static DirectoryDiff remove(int i, DiffListBySkipList skip,
       DiffList<DirectoryDiff> array) {
-    DirectoryDiffList.LOG.info("remove " + i);
+    DiffListBySkipList.LOG.info("remove " + i);
     final DirectoryDiff expected = array.remove(i);
     final DirectoryDiff computed = skip.remove(i);
     assertDirectoryDiff(expected, computed);
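Reading of the test parameters (illustrative): new DiffListBySkipList(0, 3, 5) matches the constructor above as capacity = 0, skip interval = 3, and at most 5 skip levels. Each test drives the skip list in lockstep with a plain array-backed list and asserts that the two report the same diffs and children:

    // Sketch only, mirroring the test setup above.
    final DiffListBySkipList skipList = new DiffListBySkipList(0, 3, 5);
    final DiffList<DirectoryDiff> arrayList = new DiffListByArrayList<>(0);
    // Both lists receive the same DirectoryDiff operations; verifyChildrenList
    // and remove(...) then assert that the two implementations agree.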