diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index bf68f96a6d..2935e6ae32 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -171,7 +171,8 @@ public class FsVolumeImpl implements FsVolumeSpi { this.usage = usage; if (this.usage != null) { reserved = new ReservedSpaceCalculator.Builder(conf) - .setUsage(this.usage).setStorageType(storageType).build(); + .setUsage(this.usage).setStorageType(storageType) + .setDir(currentDir != null ? currentDir.getParent() : "NULL").build(); boolean fixedSizeVolume = conf.getBoolean( DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_KEY, DFSConfigKeys.DFS_DATANODE_FIXED_VOLUME_SIZE_DEFAULT); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReservedSpaceCalculator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReservedSpaceCalculator.java index 749e16e659..0ab4032104 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReservedSpaceCalculator.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReservedSpaceCalculator.java @@ -46,6 +46,8 @@ public static class Builder { private DF usage; private StorageType storageType; + private String dir; + public Builder(Configuration conf) { this.conf = conf; } @@ -61,6 +63,11 @@ public Builder setStorageType( return this; } + public Builder setDir(String newDir) { + this.dir = newDir; + return this; + } + ReservedSpaceCalculator build() { try { Class clazz = 
conf.getClass( @@ -69,10 +76,10 @@ ReservedSpaceCalculator build() { ReservedSpaceCalculator.class); Constructor constructor = clazz.getConstructor( - Configuration.class, DF.class, StorageType.class); + Configuration.class, DF.class, StorageType.class, String.class); return (ReservedSpaceCalculator) constructor.newInstance( - conf, usage, storageType); + conf, usage, storageType, dir); } catch (Exception e) { throw new IllegalStateException( "Error instantiating ReservedSpaceCalculator", e); @@ -84,20 +91,30 @@ ReservedSpaceCalculator build() { private final Configuration conf; private final StorageType storageType; + private final String dir; + ReservedSpaceCalculator(Configuration conf, DF usage, - StorageType storageType) { + StorageType storageType, String dir) { this.usage = usage; this.conf = conf; this.storageType = storageType; + this.dir = dir; } DF getUsage() { return usage; } + String getDir() { + return dir; + } + long getReservedFromConf(String key, long defaultValue) { - return conf.getLong(key + "." + StringUtils.toLowerCase( - storageType.toString()), conf.getLongBytes(key, defaultValue)); + return conf.getLong( + key + "." + getDir() + "." + StringUtils.toLowerCase(storageType.toString()), + conf.getLong(key + "." + getDir(), + conf.getLong(key + "." 
+ StringUtils.toLowerCase(storageType.toString()), + conf.getLongBytes(key, defaultValue)))); } /** @@ -117,8 +134,8 @@ public static class ReservedSpaceCalculatorAbsolute extends private final long reservedBytes; public ReservedSpaceCalculatorAbsolute(Configuration conf, DF usage, - StorageType storageType) { - super(conf, usage, storageType); + StorageType storageType, String dir) { + super(conf, usage, storageType, dir); this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY, DFS_DATANODE_DU_RESERVED_DEFAULT); } @@ -138,8 +155,8 @@ public static class ReservedSpaceCalculatorPercentage extends private final long reservedPct; public ReservedSpaceCalculatorPercentage(Configuration conf, DF usage, - StorageType storageType) { - super(conf, usage, storageType); + StorageType storageType, String dir) { + super(conf, usage, storageType, dir); this.reservedPct = getReservedFromConf( DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY, DFS_DATANODE_DU_RESERVED_PERCENTAGE_DEFAULT); @@ -162,8 +179,8 @@ public static class ReservedSpaceCalculatorConservative extends private final long reservedPct; public ReservedSpaceCalculatorConservative(Configuration conf, DF usage, - StorageType storageType) { - super(conf, usage, storageType); + StorageType storageType, String dir) { + super(conf, usage, storageType, dir); this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY, DFS_DATANODE_DU_RESERVED_DEFAULT); this.reservedPct = getReservedFromConf( @@ -197,8 +214,8 @@ public static class ReservedSpaceCalculatorAggressive extends private final long reservedPct; public ReservedSpaceCalculatorAggressive(Configuration conf, DF usage, - StorageType storageType) { - super(conf, usage, storageType); + StorageType storageType, String dir) { + super(conf, usage, storageType, dir); this.reservedBytes = getReservedFromConf(DFS_DATANODE_DU_RESERVED_KEY, DFS_DATANODE_DU_RESERVED_DEFAULT); this.reservedPct = getReservedFromConf( diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml index 52075a24f1..3e9f54f58f 100755 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml @@ -397,12 +397,19 @@ dfs.datanode.du.reserved 0 Reserved space in bytes per volume. Always leave this much space free for non dfs use. + Specific directory based reservation is supported. The property can be followed with directory + name which is set at 'dfs.datanode.data.dir'. For example, reserved space for /data/hdfs1/data + can be configured using property 'dfs.datanode.du.reserved./data/hdfs1/data'. If specific directory + reservation is not configured then dfs.datanode.du.reserved will be used. Specific storage type based reservation is also supported. The property can be followed with corresponding storage types ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage. For example, reserved space for RAM_DISK storage can be configured using property 'dfs.datanode.du.reserved.ram_disk'. If specific storage type reservation is not configured then dfs.datanode.du.reserved will be used. Support multiple size unit suffix(case insensitive), - as described in dfs.blocksize. + as described in dfs.blocksize. Use directory name and storage type based reservation at the + same time is also allowed if both are configured. + Property priority example: dfs.datanode.du.reserved./data/hdfs1/data.ram_disk > + dfs.datanode.du.reserved./data/hdfs1/data > dfs.datanode.du.reserved.ram_disk > dfs.datanode.du.reserved Note: In case of using tune2fs to set reserved-blocks-percentage, or other filesystem tools, then you can possibly run into out of disk errors because hadoop will not check those external tool configurations. @@ -414,12 +421,19 @@ 0 Reserved space in percentage. 
Read dfs.datanode.du.reserved.calculator to see when this takes effect. The actual number of bytes reserved will be calculated by using the - total capacity of the data directory in question. Specific storage type based reservation + total capacity of the data directory in question. Specific directory based reservation is + supported. The property can be followed with directory name which is set at 'dfs.datanode.data.dir'. + For example, reserved percentage space for /data/hdfs1/data can be configured using property + 'dfs.datanode.du.reserved.pct./data/hdfs1/data'. If specific directory reservation is not + configured then dfs.datanode.du.reserved.pct will be used. Specific storage type based reservation is also supported. The property can be followed with corresponding storage types ([ssd]/[disk]/[archive]/[ram_disk]/[nvdimm]) for cluster with heterogeneous storage. For example, reserved percentage space for RAM_DISK storage can be configured using property 'dfs.datanode.du.reserved.pct.ram_disk'. If specific storage type reservation is not configured - then dfs.datanode.du.reserved.pct will be used. + then dfs.datanode.du.reserved.pct will be used. Using directory and storage type based reservation + at the same time is also allowed if both are configured. 
+ Priority example: dfs.datanode.du.reserved.pct./data/hdfs1/data.ram_disk > dfs.datanode.du.reserved.pct./data/hdfs1/data + > dfs.datanode.du.reserved.pct.ram_disk > dfs.datanode.du.reserved.pct diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestReservedSpaceCalculator.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestReservedSpaceCalculator.java index fa666f2a69..1013686396 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestReservedSpaceCalculator.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestReservedSpaceCalculator.java @@ -168,6 +168,55 @@ public void testReservedSpaceAggresivePerStorageType() { checkReserved(StorageType.ARCHIVE, 100000, 5000); } + @Test + public void testReservedSpaceAbsolutePerDir() { + conf.setClass(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, ReservedSpaceCalculatorAbsolute.class, + ReservedSpaceCalculator.class); + + String dir1 = "/data/hdfs1/data"; + String dir2 = "/data/hdfs2/data"; + String dir3 = "/data/hdfs3/data"; + + conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." + dir1 + ".ssd", 900); + conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." + dir1, 1800); + conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + "." 
+ dir2, 2700); + conf.setLong(DFS_DATANODE_DU_RESERVED_KEY + ".ssd", 3600); + conf.setLong(DFS_DATANODE_DU_RESERVED_KEY, 4500); + + checkReserved(StorageType.SSD, 10000, 900, dir1); + checkReserved(StorageType.DISK, 10000, 1800, dir1); + checkReserved(StorageType.SSD, 10000, 2700, dir2); + checkReserved(StorageType.DISK, 10000, 2700, dir2); + checkReserved(StorageType.SSD, 10000, 3600, dir3); + checkReserved(StorageType.DISK, 10000, 4500, dir3); + } + + @Test + public void testReservedSpacePercentagePerDir() { + conf.setClass(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, + ReservedSpaceCalculatorPercentage.class, + ReservedSpaceCalculator.class); + + String dir1 = "/data/hdfs1/data"; + String dir2 = "/data/hdfs2/data"; + String dir3 = "/data/hdfs3/data"; + + // Set percentage reserved values for different directories + conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." + dir1 + ".ssd", 20); + conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." + dir1, 10); + conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + "." 
+ dir2, 25); + conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY + ".ssd", 30); + conf.setLong(DFS_DATANODE_DU_RESERVED_PERCENTAGE_KEY, 40); + + // Verify reserved space calculations for different directories and storage types + checkReserved(StorageType.SSD, 10000, 2000, dir1); + checkReserved(StorageType.DISK, 10000, 1000, dir1); + checkReserved(StorageType.SSD, 10000, 2500, dir2); + checkReserved(StorageType.DISK, 10000, 2500, dir2); + checkReserved(StorageType.SSD, 10000, 3000, dir3); + checkReserved(StorageType.DISK, 10000, 4000, dir3); + } + @Test(expected = IllegalStateException.class) public void testInvalidCalculator() { conf.set(DFS_DATANODE_DU_RESERVED_CALCULATOR_KEY, "INVALIDTYPE"); @@ -179,10 +228,15 @@ public void testInvalidCalculator() { private void checkReserved(StorageType storageType, long totalCapacity, long reservedExpected) { + checkReserved(storageType, totalCapacity, reservedExpected, "NULL"); + } + + private void checkReserved(StorageType storageType, + long totalCapacity, long reservedExpected, String dir) { when(usage.getCapacity()).thenReturn(totalCapacity); reserved = new ReservedSpaceCalculator.Builder(conf).setUsage(usage) - .setStorageType(storageType).build(); + .setStorageType(storageType).setDir(dir).build(); assertEquals(reservedExpected, reserved.getReserved()); } } \ No newline at end of file