diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index fce21949be..f8e4638823 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -109,6 +109,9 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
+import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
/**
* Provides access to configuration parameters.
*
@@ -1818,6 +1821,83 @@ public long[] getTimeDurations(String name, TimeUnit unit) {
}
return durations;
}
+
+ /**
+ * Gets the storage size from the config, or returns the defaultValue if
+ * the key is not set. The return value is expressed in the target unit.
+ *
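+ * For example, if the key is set to "10GB" and the target unit is
+ * StorageUnit.MB, this call returns 10240.0 (illustrative values, not an
+ * existing configuration key).
+ *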
+ * @param name - Key Name
+ * @param defaultValue - Default Value -- e.g. 100MB
+ * @param targetUnit - The unit that we want the result to be in.
+ * @return double -- the value converted to the target unit.
+ */
+ public double getStorageSize(String name, String defaultValue,
+ StorageUnit targetUnit) {
+ Preconditions.checkState(isNotBlank(name), "Key cannot be blank.");
+ String vString = get(name);
+ if (isBlank(vString)) {
+ vString = defaultValue;
+ }
+
+ // Please note: There is a bit of subtlety here. If the user specifies
+ // the default as "1GB", but the requested unit is MB, we will return
+ // the value in MB even though the default string is specified in GB.
+
+ // Converts a string like "1GB" to the unit specified in targetUnit.
+
+ StorageSize measure = StorageSize.parse(vString);
+ return convertStorageUnit(measure.getValue(), measure.getUnit(),
+ targetUnit);
+ }
+
+ /**
+ * Gets storage size from a config file.
+ *
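+ * For example, with the key unset and a defaultValue of 100 with target
+ * unit KB, this call simply returns 100.0 (illustrative values, not an
+ * existing configuration key).
+ *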
+ * @param name - Key to read.
+ * @param defaultValue - The default value to return in case the key is
+ * not present.
+ * @param targetUnit - The Storage unit that should be used
+ * for the return value.
+ * @return - double value in the Storage Unit specified.
+ */
+ public double getStorageSize(String name, double defaultValue,
+ StorageUnit targetUnit) {
+ Preconditions.checkNotNull(targetUnit, "Conversion unit cannot be null.");
+ Preconditions.checkState(isNotBlank(name), "Name cannot be blank.");
+ String vString = get(name);
+ if (isBlank(vString)) {
+ return targetUnit.getDefault(defaultValue);
+ }
+
+ StorageSize measure = StorageSize.parse(vString);
+ return convertStorageUnit(measure.getValue(), measure.getUnit(),
+ targetUnit);
+
+ }
+
+ /**
+ * Sets Storage Size for the specified key.
+ *
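+ * For example, setStorageSize("hypothetical.key", 10, StorageUnit.MB)
+ * stores the string "10.0mb" for that key.
+ *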
+ * @param name - Key to set.
+ * @param value - The numeric value to set.
+ * @param unit - Storage Unit to be used.
+ */
+ public void setStorageSize(String name, double value, StorageUnit unit) {
+ set(name, value + unit.getShortName());
+ }
+
+ /**
+ * Converts a value from one storage unit to another.
+ *
+ * @param value - the value to convert.
+ * @param sourceUnit - Source unit to convert from.
+ * @param targetUnit - Target unit to convert to.
+ * @return the value expressed in the target unit.
+ */
+ private double convertStorageUnit(double value, StorageUnit sourceUnit,
+ StorageUnit targetUnit) {
+ double byteValue = sourceUnit.toBytes(value);
+ return targetUnit.fromBytes(byteValue);
+ }
/**
* Get the value of the <code>name</code> property as a <code>Pattern</code>.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageSize.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageSize.java
new file mode 100644
index 0000000000..6cad6f7087
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageSize.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.conf;
+
+import java.util.Locale;
+
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+
+/**
+ * A class that contains the numeric value and the unit of measure.
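+ *
+ * For example, StorageSize.parse("64gb") yields a StorageSize whose unit
+ * is StorageUnit.GB and whose value is 64.0.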
+ */
+public class StorageSize {
+ private final StorageUnit unit;
+ private final double value;
+
+ /**
+ * Constructs a Storage Measure, which contains the value and the unit of
+ * measure.
+ *
+ * @param unit - Unit of Measure
+ * @param value - Numeric value.
+ */
+ public StorageSize(StorageUnit unit, double value) {
+ this.unit = unit;
+ this.value = value;
+ }
+
+ private static void checkState(boolean state, String errorString){
+ if(!state) {
+ throw new IllegalStateException(errorString);
+ }
+ }
+
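+ /**
+ * Parses a human-readable size string into a StorageSize. The unit suffix
+ * may be a long name ("gigabytes"), a short name ("gb") or a single
+ * character ("g"), matched case-insensitively; for example "64 GB",
+ * "2048b" and "1t" are all accepted.
+ *
+ * @param value - string such as "128mb" to parse.
+ * @return StorageSize holding the numeric value and its unit.
+ */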
+ public static StorageSize parse(String value) {
+ checkState(isNotBlank(value), "value cannot be blank");
+ String sanitizedValue = value.trim().toLowerCase(Locale.ENGLISH);
+ StorageUnit parsedUnit = null;
+ for (StorageUnit unit : StorageUnit.values()) {
+ if (sanitizedValue.endsWith(unit.getShortName()) ||
+ sanitizedValue.endsWith(unit.getLongName()) ||
+ sanitizedValue.endsWith(unit.getSuffixChar())) {
+ parsedUnit = unit;
+ break;
+ }
+ }
+
+ if (parsedUnit == null) {
+ throw new IllegalArgumentException(value + " is not in expected format. " +
+ "Expected format is <number><unit>. e.g. 1000MB");
+ }
+
+
+ String suffix = "";
+ boolean found = false;
+
+ // We are trying to get the longest match first, so the order of
+ // matching is getLongName, getShortName and then getSuffixChar.
+ if (!found && sanitizedValue.endsWith(parsedUnit.getLongName())) {
+ found = true;
+ suffix = parsedUnit.getLongName();
+ }
+
+ if (!found && sanitizedValue.endsWith(parsedUnit.getShortName())) {
+ found = true;
+ suffix = parsedUnit.getShortName();
+ }
+
+ if (!found && sanitizedValue.endsWith(parsedUnit.getSuffixChar())) {
+ found = true;
+ suffix = parsedUnit.getSuffixChar();
+ }
+
+ checkState(found, "Something is wrong, we have to find a " +
+ "match. Internal error.");
+
+ String valString =
+ sanitizedValue.substring(0, sanitizedValue.length() - suffix.length());
+ return new StorageSize(parsedUnit, Double.parseDouble(valString));
+
+ }
+
+ public StorageUnit getUnit() {
+ return unit;
+ }
+
+ public double getValue() {
+ return value;
+ }
+
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageUnit.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageUnit.java
new file mode 100644
index 0000000000..fe3c6f866b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/StorageUnit.java
@@ -0,0 +1,530 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.conf;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+
+/**
+ * Class that maintains different forms of Storage Units.
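+ *
+ * For example, StorageUnit.GB.toMBs(1) returns 1024.0 and
+ * StorageUnit.MB.fromBytes(1048576) returns 1.0.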
+ */
+public enum StorageUnit {
+ /*
+ We rely on BYTES being declared last so that the longer short names are
+ matched first. The short name of BYTES is "b", which is also the final
+ character of every other short name and would otherwise match them.
+
+ If we change this order, the matching loop in StorageSize#parse needs to
+ be changed too, since values() returns the enum constants in declaration
+ order and we depend on that.
+ */
+
+ EB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, EXABYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return multiply(value, EXABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return multiply(value, EXABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return multiply(value, EXABYTES / GIGABYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return multiply(value, EXABYTES / TERABYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return multiply(value, EXABYTES / PETABYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return value;
+ }
+
+ @Override
+ public String getLongName() {
+ return "exabytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "eb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "e";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toEBs(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, EXABYTES);
+ }
+ },
+ PB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, PETABYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return multiply(value, PETABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return multiply(value, PETABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return multiply(value, PETABYTES / GIGABYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return multiply(value, PETABYTES / TERABYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return value;
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES / PETABYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "petabytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "pb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "p";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toPBs(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, PETABYTES);
+ }
+ },
+ TB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, TERABYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return multiply(value, TERABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return multiply(value, TERABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return multiply(value, TERABYTES / GIGABYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return value;
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return divide(value, PETABYTES / TERABYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES / TERABYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "terabytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "tb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "t";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toTBs(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, TERABYTES);
+ }
+ },
+ GB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, GIGABYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return multiply(value, GIGABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return multiply(value, GIGABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return value;
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return divide(value, TERABYTES / GIGABYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return divide(value, PETABYTES / GIGABYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES / GIGABYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "gigabytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "gb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "g";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toGBs(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, GIGABYTES);
+ }
+ },
+ MB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, MEGABYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return multiply(value, MEGABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return value;
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return divide(value, GIGABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return divide(value, TERABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return divide(value, PETABYTES / MEGABYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES / MEGABYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "megabytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "mb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "m";
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, MEGABYTES);
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toMBs(value);
+ }
+ },
+ KB {
+ @Override
+ public double toBytes(double value) {
+ return multiply(value, KILOBYTES);
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return value;
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return divide(value, MEGABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return divide(value, GIGABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return divide(value, TERABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return divide(value, PETABYTES / KILOBYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES / KILOBYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "kilobytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "kb";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "k";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toKBs(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return divide(value, KILOBYTES);
+ }
+ },
+ BYTES {
+ @Override
+ public double toBytes(double value) {
+ return value;
+ }
+
+ @Override
+ public double toKBs(double value) {
+ return divide(value, KILOBYTES);
+ }
+
+ @Override
+ public double toMBs(double value) {
+ return divide(value, MEGABYTES);
+ }
+
+ @Override
+ public double toGBs(double value) {
+ return divide(value, GIGABYTES);
+ }
+
+ @Override
+ public double toTBs(double value) {
+ return divide(value, TERABYTES);
+ }
+
+ @Override
+ public double toPBs(double value) {
+ return divide(value, PETABYTES);
+ }
+
+ @Override
+ public double toEBs(double value) {
+ return divide(value, EXABYTES);
+ }
+
+ @Override
+ public String getLongName() {
+ return "bytes";
+ }
+
+ @Override
+ public String getShortName() {
+ return "b";
+ }
+
+ @Override
+ public String getSuffixChar() {
+ return "b";
+ }
+
+ @Override
+ public double getDefault(double value) {
+ return toBytes(value);
+ }
+
+ @Override
+ public double fromBytes(double value) {
+ return value;
+ }
+ };
+
+ private static final double BYTE = 1L;
+ private static final double KILOBYTES = BYTE * 1024L;
+ private static final double MEGABYTES = KILOBYTES * 1024L;
+ private static final double GIGABYTES = MEGABYTES * 1024L;
+ private static final double TERABYTES = GIGABYTES * 1024L;
+ private static final double PETABYTES = TERABYTES * 1024L;
+ private static final double EXABYTES = PETABYTES * 1024L;
+ private static final int PRECISION = 4;
+
+ /**
+ * Divides using BigDecimal to avoid losing precision with large values.
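+ * For example, divide(34565, 1024) returns 33.7549, rounded to four
+ * decimal places.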
+ *
+ * @param value - the dividend.
+ * @param divisor - the divisor.
+ * @return the quotient, rounded to four decimal places.
+ */
+ private static double divide(double value, double divisor) {
+ BigDecimal val = new BigDecimal(value);
+ BigDecimal bDivisor = new BigDecimal(divisor);
+ return val.divide(bDivisor).setScale(PRECISION, RoundingMode.HALF_UP)
+ .doubleValue();
+ }
+
+ /**
+ * Multiplies using BigDecimal to avoid losing precision with large values.
+ *
+ * @param first - First Num.
+ * @param second - Second Num.
+ * @return the product, rounded to four decimal places.
+ */
+ private static double multiply(double first, double second) {
+ BigDecimal firstVal = new BigDecimal(first);
+ BigDecimal secondVal = new BigDecimal(second);
+ return firstVal.multiply(secondVal)
+ .setScale(PRECISION, RoundingMode.HALF_UP).doubleValue();
+ }
+
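+ /** Converts a value in this unit to bytes; e.g. KB.toBytes(1) is 1024.0. */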
+ public abstract double toBytes(double value);
+
+ public abstract double toKBs(double value);
+
+ public abstract double toMBs(double value);
+
+ public abstract double toGBs(double value);
+
+ public abstract double toTBs(double value);
+
+ public abstract double toPBs(double value);
+
+ public abstract double toEBs(double value);
+
+ public abstract String getLongName();
+
+ public abstract String getShortName();
+
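+ /** Single-character suffix accepted by StorageSize#parse, e.g. "g" for GB. */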
+ public abstract String getSuffixChar();
+
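+ /**
+ * Returns the given value interpreted as already being in this unit; used
+ * by Configuration#getStorageSize when a key is absent and its default is
+ * a plain number.
+ */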
+ public abstract double getDefault(double value);
+
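+ /** Converts a byte count into this unit; e.g. KB.fromBytes(2048) is 2.0. */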
+ public abstract double fromBytes(double value);
+
+ @Override
+ public String toString() {
+ return getLongName();
+ }
+
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index 78dcdd632c..24ec4fcc37 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -48,7 +48,15 @@
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.junit.After;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
+
+import static org.apache.hadoop.conf.StorageUnit.BYTES;
+import static org.apache.hadoop.conf.StorageUnit.GB;
+import static org.apache.hadoop.conf.StorageUnit.KB;
+import static org.apache.hadoop.conf.StorageUnit.MB;
+import static org.apache.hadoop.conf.StorageUnit.TB;
+import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.junit.Assert.assertArrayEquals;
@@ -68,10 +76,13 @@
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.hamcrest.CoreMatchers;
+import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
public class TestConfiguration {
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
private static final double DOUBLE_DELTA = 0.000000001f;
private Configuration conf;
final static String CONFIG = new File("./test-config-TestConfiguration.xml").getAbsolutePath();
@@ -1325,6 +1336,71 @@ public void testTimeDuration() {
}
}
+ @Test
+ public void testStorageUnit() {
+ final String key = "valid.key";
+ final String nonKey = "not.a.key";
+ Configuration conf = new Configuration(false);
+
+ conf.setStorageSize(key, 10, MB);
+ // This call returns the value specified in the Key as a double in MBs.
+ assertThat(conf.getStorageSize(key, "1GB", MB),
+ is(10.0));
+
+ // Since this key is missing, this call converts the default value of 1GB
+ // to MBs and returns that value.
+ assertThat(conf.getStorageSize(nonKey, "1GB", MB),
+ is(1024.0));
+
+
+ conf.setStorageSize(key, 1024, BYTES);
+ assertThat(conf.getStorageSize(key, 100, KB), is(1.0));
+
+ assertThat(conf.getStorageSize(nonKey, 100.0, KB), is(100.0));
+
+ // We try out different kinds of string formats to see if they work, and
+ // during the read we also convert to different storage units.
+ conf.setStrings(key, "1TB");
+ assertThat(conf.getStorageSize(key, "1PB", GB), is(1024.0));
+
+ conf.setStrings(key, "1bytes");
+ assertThat(conf.getStorageSize(key, "1PB", KB), is(0.001));
+
+ conf.setStrings(key, "2048b");
+ assertThat(conf.getStorageSize(key, "1PB", KB), is(2.0));
+
+ conf.setStrings(key, "64 GB");
+ assertThat(conf.getStorageSize(key, "1PB", GB), is(64.0));
+
+ // Match the parsing patterns of getLongBytes, which takes single char
+ // suffix.
+ conf.setStrings(key, "1T");
+ assertThat(conf.getStorageSize(key, "1GB", TB), is(1.0));
+
+ conf.setStrings(key, "1k");
+ assertThat(conf.getStorageSize(key, "1GB", KB), is(1.0));
+
+ conf.setStrings(key, "10m");
+ assertThat(conf.getStorageSize(key, "1GB", MB), is(10.0));
+
+ // Missing format specification, this should throw. The first two failure
+ // cases use try/catch so that all three illegal inputs are exercised; the
+ // ExpectedException rule would end the test at the first throw.
+ conf.setStrings(key, "100");
+ try {
+ conf.getStorageSize(key, "1PB", GB);
+ fail("Expected IllegalArgumentException for a value without a unit.");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+
+ // Illegal format specification, this should throw.
+ conf.setStrings(key, "1HB");
+ try {
+ conf.getStorageSize(key, "1PB", GB);
+ fail("Expected IllegalArgumentException for an illegal unit specification.");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+
+ // Illegal number specification, this should throw.
+ conf.setStrings(key, "HadoopGB");
+ thrown.expect(IllegalArgumentException.class);
+ conf.getStorageSize(key, "1PB", GB);
+ }
+
@Test
public void testTimeDurationWarning() {
// check warn for possible loss of precision
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestStorageUnit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestStorageUnit.java
new file mode 100644
index 0000000000..e29345d0d1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestStorageUnit.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ *
+ */
+
+package org.apache.hadoop.conf;
+
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+/**
+ * Tests that Storage Units work as expected.
+ */
+public class TestStorageUnit {
+ final static double KB = 1024.0;
+ final static double MB = KB * 1024.0;
+ final static double GB = MB * 1024.0;
+ final static double TB = GB * 1024.0;
+ final static double PB = TB * 1024.0;
+ final static double EB = PB * 1024.0;
+
+ @Test
+ public void testByteToKiloBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1024.0, 1.0);
+ results.put(2048.0, 2.0);
+ results.put(-1024.0, -1.0);
+ results.put(34565.0, 33.7549);
+ results.put(223344332.0, 218109.6992);
+ results.put(1234983.0, 1206.0381);
+ results.put(1234332.0, 1205.4023);
+ results.put(0.0, 0.0);
+
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toKBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testBytesToMegaBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1048576.0, 1.0);
+ results.put(24117248.0, 23.0);
+ results.put(459920023.0, 438.6139);
+ results.put(234443233.0, 223.5825);
+ results.put(-35651584.0, -34.0);
+ results.put(0.0, 0.0);
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toMBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testBytesToGigaBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1073741824.0, 1.0);
+ results.put(24696061952.0, 23.0);
+ results.put(459920023.0, 0.4283);
+ results.put(234443233.0, 0.2183);
+ results.put(-36507222016.0, -34.0);
+ results.put(0.0, 0.0);
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toGBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testBytesToTeraBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1.09951E+12, 1.0);
+ results.put(2.52888E+13, 23.0);
+ results.put(459920023.0, 0.0004);
+ results.put(234443233.0, 0.0002);
+ results.put(-3.73834E+13, -34.0);
+ results.put(0.0, 0.0);
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toTBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testBytesToPetaBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1.1259E+15, 1.0);
+ results.put(2.58957E+16, 23.0);
+ results.put(4.70958E+11, 0.0004);
+ results.put(234443233.0, 0.0000); // Out of precision window.
+ results.put(-3.82806E+16, -34.0);
+ results.put(0.0, 0.0);
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toPBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testBytesToExaBytes() {
+ Map<Double, Double> results = new HashMap<>();
+ results.put(1.15292E+18, 1.0);
+ results.put(2.65172E+19, 23.0);
+ results.put(4.82261E+14, 0.0004);
+ results.put(234443233.0, 0.0000); // Out of precision window.
+ results.put(-3.91993E+19, -34.0);
+ results.put(0.0, 0.0);
+ for (Map.Entry<Double, Double> entry : results.entrySet()) {
+ assertThat(StorageUnit.BYTES.toEBs(entry.getKey()), is(entry.getValue()));
+ }
+ }
+
+ @Test
+ public void testByteConversions() {
+ assertThat(StorageUnit.BYTES.getShortName(), is("b"));
+ assertThat(StorageUnit.BYTES.getSuffixChar(), is("b"));
+
+ assertThat(StorageUnit.BYTES.getLongName(), is("bytes"));
+ assertThat(StorageUnit.BYTES.toString(), is("bytes"));
+ assertThat(StorageUnit.BYTES.toBytes(1), is(1.0));
+ assertThat(StorageUnit.BYTES.toBytes(1024),
+ is(StorageUnit.BYTES.getDefault(1024)));
+ assertThat(StorageUnit.BYTES.fromBytes(10), is(10.0));
+ }
+
+ @Test
+ public void testKBConversions() {
+ assertThat(StorageUnit.KB.getShortName(), is("kb"));
+ assertThat(StorageUnit.KB.getSuffixChar(), is("k"));
+ assertThat(StorageUnit.KB.getLongName(), is("kilobytes"));
+ assertThat(StorageUnit.KB.toString(), is("kilobytes"));
+ assertThat(StorageUnit.KB.toKBs(1024),
+ is(StorageUnit.KB.getDefault(1024)));
+
+
+ assertThat(StorageUnit.KB.toBytes(1), is(KB));
+ assertThat(StorageUnit.KB.fromBytes(KB), is(1.0));
+
+ assertThat(StorageUnit.KB.toKBs(10), is(10.0));
+ assertThat(StorageUnit.KB.toMBs(3.0 * 1024.0), is(3.0));
+ assertThat(StorageUnit.KB.toGBs(1073741824), is(1024.0));
+ assertThat(StorageUnit.KB.toTBs(1073741824), is(1.0));
+ assertThat(StorageUnit.KB.toPBs(1.0995116e+12), is(1.0));
+ assertThat(StorageUnit.KB.toEBs(1.1258999e+15), is(1.0));
+ }
+
+ @Test
+ public void testMBConversions() {
+ assertThat(StorageUnit.MB.getShortName(), is("mb"));
+ assertThat(StorageUnit.MB.getSuffixChar(), is("m"));
+ assertThat(StorageUnit.MB.getLongName(), is("megabytes"));
+ assertThat(StorageUnit.MB.toString(), is("megabytes"));
+ assertThat(StorageUnit.MB.toMBs(1024),
+ is(StorageUnit.MB.getDefault(1024)));
+
+
+
+ assertThat(StorageUnit.MB.toBytes(1), is(MB));
+ assertThat(StorageUnit.MB.fromBytes(MB), is(1.0));
+
+ assertThat(StorageUnit.MB.toKBs(1), is(1024.0));
+ assertThat(StorageUnit.MB.toMBs(10), is(10.0));
+
+ assertThat(StorageUnit.MB.toGBs(44040192), is(43008.0));
+ assertThat(StorageUnit.MB.toTBs(1073741824), is(1024.0));
+ assertThat(StorageUnit.MB.toPBs(1073741824), is(1.0));
+ assertThat(StorageUnit.MB.toEBs(1 * (EB/MB)), is(1.0));
+ }
+
+ @Test
+ public void testGBConversions() {
+ assertThat(StorageUnit.GB.getShortName(), is("gb"));
+ assertThat(StorageUnit.GB.getSuffixChar(), is("g"));
+ assertThat(StorageUnit.GB.getLongName(), is("gigabytes"));
+ assertThat(StorageUnit.GB.toString(), is("gigabytes"));
+ assertThat(StorageUnit.GB.toGBs(1024),
+ is(StorageUnit.GB.getDefault(1024)));
+
+
+ assertThat(StorageUnit.GB.toBytes(1), is(GB));
+ assertThat(StorageUnit.GB.fromBytes(GB), is(1.0));
+
+ assertThat(StorageUnit.GB.toKBs(1), is(1024.0 * 1024));
+ assertThat(StorageUnit.GB.toMBs(10), is(10.0 * 1024));
+
+ assertThat(StorageUnit.GB.toGBs(44040192.0), is(44040192.0));
+ assertThat(StorageUnit.GB.toTBs(1073741824), is(1048576.0));
+ assertThat(StorageUnit.GB.toPBs(1.07375e+9), is(1024.0078));
+ assertThat(StorageUnit.GB.toEBs(1 * (EB/GB)), is(1.0));
+ }
+
+ @Test
+ public void testTBConversions() {
+ assertThat(StorageUnit.TB.getShortName(), is("tb"));
+ assertThat(StorageUnit.TB.getSuffixChar(), is("t"));
+ assertThat(StorageUnit.TB.getLongName(), is("terabytes"));
+ assertThat(StorageUnit.TB.toString(), is("terabytes"));
+ assertThat(StorageUnit.TB.toTBs(1024),
+ is(StorageUnit.TB.getDefault(1024)));
+
+ assertThat(StorageUnit.TB.toBytes(1), is(TB));
+ assertThat(StorageUnit.TB.fromBytes(TB), is(1.0));
+
+ assertThat(StorageUnit.TB.toKBs(1), is(1024.0 * 1024* 1024));
+ assertThat(StorageUnit.TB.toMBs(10), is(10.0 * 1024 * 1024));
+
+ assertThat(StorageUnit.TB.toGBs(44040192.0), is(45097156608.0));
+ assertThat(StorageUnit.TB.toTBs(1073741824.0), is(1073741824.0));
+ assertThat(StorageUnit.TB.toPBs(1024), is(1.0));
+ assertThat(StorageUnit.TB.toEBs(1 * (EB/TB)), is(1.0));
+ }
+
+ @Test
+ public void testPBConversions() {
+ assertThat(StorageUnit.PB.getShortName(), is("pb"));
+ assertThat(StorageUnit.PB.getSuffixChar(), is("p"));
+ assertThat(StorageUnit.PB.getLongName(), is("petabytes"));
+ assertThat(StorageUnit.PB.toString(), is("petabytes"));
+ assertThat(StorageUnit.PB.toPBs(1024),
+ is(StorageUnit.PB.getDefault(1024)));
+
+
+ assertThat(StorageUnit.PB.toBytes(1), is(PB));
+ assertThat(StorageUnit.PB.fromBytes(PB), is(1.0));
+
+ assertThat(StorageUnit.PB.toKBs(1), is(PB/KB));
+ assertThat(StorageUnit.PB.toMBs(10), is(10.0 * (PB / MB)));
+
+ assertThat(StorageUnit.PB.toGBs(44040192.0),
+ is(44040192.0 * PB/GB));
+ assertThat(StorageUnit.PB.toTBs(1073741824.0),
+ is(1073741824.0 * (PB/TB)));
+ assertThat(StorageUnit.PB.toPBs(1024.0), is(1024.0));
+ assertThat(StorageUnit.PB.toEBs(1024.0), is(1.0));
+ }
+
+
+ @Test
+ public void testEBConversions() {
+ assertThat(StorageUnit.EB.getShortName(), is("eb"));
+ assertThat(StorageUnit.EB.getSuffixChar(), is("e"));
+
+ assertThat(StorageUnit.EB.getLongName(), is("exabytes"));
+ assertThat(StorageUnit.EB.toString(), is("exabytes"));
+ assertThat(StorageUnit.EB.toEBs(1024),
+ is(StorageUnit.EB.getDefault(1024)));
+
+ assertThat(StorageUnit.EB.toBytes(1), is(EB));
+ assertThat(StorageUnit.EB.fromBytes(EB), is(1.0));
+
+ assertThat(StorageUnit.EB.toKBs(1), is(EB/KB));
+ assertThat(StorageUnit.EB.toMBs(10), is(10.0 * (EB / MB)));
+
+ assertThat(StorageUnit.EB.toGBs(44040192.0),
+ is(44040192.0 * EB/GB));
+ assertThat(StorageUnit.EB.toTBs(1073741824.0),
+ is(1073741824.0 * (EB/TB)));
+ assertThat(StorageUnit.EB.toPBs(1.0), is(1024.0));
+ assertThat(StorageUnit.EB.toEBs(42.0), is(42.0));
+ }
+
+
+}