HDFS-10676. Add namenode metric to measure time spent in generating EDEKs. Contributed by Hanisha Koneru.

Xiaoyu Yao 2016-07-28 16:02:06 -07:00
parent 4e756d7271
commit ce3d68e9c3
3 changed files with 92 additions and 0 deletions

View File

@@ -49,6 +49,7 @@
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.protobuf.InvalidProtocolBufferException;
import static org.apache.hadoop.util.Time.monotonicNow;
/**
* Helper class to perform encryption zone operation.
@@ -76,11 +77,14 @@ static EncryptedKeyVersion generateEncryptedDataEncryptionKey(
return null;
}
EncryptedKeyVersion edek = null;
long generateEDEKStartTime = monotonicNow();
try {
edek = fsd.getProvider().generateEncryptedKey(ezKeyName);
} catch (GeneralSecurityException e) {
throw new IOException(e);
}
long generateEDEKTime = monotonicNow() - generateEDEKStartTime;
NameNode.getNameNodeMetrics().addGenerateEDEKTime(generateEDEKTime);
Preconditions.checkNotNull(edek);
return edek;
}
@@ -355,6 +359,7 @@ public void run() {
int sinceLastLog = logCoolDown; // always print the first failure
boolean success = false;
IOException lastSeenIOE = null;
long warmUpEDEKStartTime = monotonicNow();
while (true) {
try {
kp.warmUpEncryptedKeys(keyNames);
@@ -382,6 +387,8 @@ public void run() {
}
sinceLastLog += retryInterval;
}
long warmUpEDEKTime = monotonicNow() - warmUpEDEKStartTime;
NameNode.getNameNodeMetrics().addWarmUpEDEKTime(warmUpEDEKTime);
if (!success) {
NameNode.LOG.warn("Unable to warm up EDEKs.");
if (lastSeenIOE != null) {
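
Both hunks above follow the same pattern: capture a start timestamp with monotonicNow(), run the key-provider call, then hand the elapsed milliseconds to the NameNode metrics. Below is a minimal, self-contained sketch of that pattern, not part of this commit; the class and metric names are illustrative only.

import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableRate;

import static org.apache.hadoop.util.Time.monotonicNow;

// Illustrative only: time an operation and feed the elapsed milliseconds
// into a MutableRate, mirroring the EDEK generation and warm-up timing above.
public class EdekTimingSketch {
  private final MetricsRegistry registry = new MetricsRegistry("EdekTimingSketch");
  private final MutableRate generateEDEKTime =
      registry.newRate("generateEDEKTime", "Generate EDEK time");

  public void runTimed(Runnable op) {
    long start = monotonicNow();                   // monotonic clock, safe against wall-clock jumps
    op.run();                                      // e.g. provider.generateEncryptedKey(ezKeyName)
    generateEDEKTime.add(monotonicNow() - start);  // record elapsed time in milliseconds
  }
}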

View File

@@ -115,6 +115,10 @@ public long totalFileOps(){
final MutableQuantiles[] blockReportQuantiles;
@Metric("Cache report") MutableRate cacheReport;
final MutableQuantiles[] cacheReportQuantiles;
@Metric("Generate EDEK time") private MutableRate generateEDEKTime;
private final MutableQuantiles[] generateEDEKTimeQuantiles;
@Metric("Warm-up EDEK time") private MutableRate warmUpEDEKTime;
private final MutableQuantiles[] warmUpEDEKTimeQuantiles;
@Metric("Duration in SafeMode at startup in msec")
MutableGaugeInt safeModeTime;
@@ -139,6 +143,8 @@ public long totalFileOps(){
syncsQuantiles = new MutableQuantiles[len];
blockReportQuantiles = new MutableQuantiles[len];
cacheReportQuantiles = new MutableQuantiles[len];
generateEDEKTimeQuantiles = new MutableQuantiles[len];
warmUpEDEKTimeQuantiles = new MutableQuantiles[len];
for (int i = 0; i < len; i++) {
int interval = intervals[i];
@@ -151,6 +157,12 @@ public long totalFileOps(){
cacheReportQuantiles[i] = registry.newQuantiles(
"cacheReport" + interval + "s",
"Cache report", "ops", "latency", interval);
generateEDEKTimeQuantiles[i] = registry.newQuantiles(
"generateEDEKTime" + interval + "s",
"Generate EDEK time", "ops", "latency", interval);
warmUpEDEKTimeQuantiles[i] = registry.newQuantiles(
"warmupEDEKTime" + interval + "s",
"Warm up EDEK time", "ops", "latency", interval);
}
}
@@ -327,4 +339,18 @@ public void addGetImage(long latency) {
public void addPutImage(long latency) {
putImage.add(latency);
}
public void addGenerateEDEKTime(long latency) {
generateEDEKTime.add(latency);
for (MutableQuantiles q : generateEDEKTimeQuantiles) {
q.add(latency);
}
}
public void addWarmUpEDEKTime(long latency) {
warmUpEDEKTime.add(latency);
for (MutableQuantiles q : warmUpEDEKTimeQuantiles) {
q.add(latency);
}
}
}
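
For context, not part of the commit: metrics2 publishes a MutableRate named GenerateEDEKTime as GenerateEDEKTimeNumOps and GenerateEDEKTimeAvgTime, and each MutableQuantiles entry adds per-interval percentile gauges such as GenerateEDEKTime1s. A hedged sketch of reading the counters in a test, assuming the NameNodeActivity source name and the MetricsAsserts test helpers:

import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.metrics2.MetricsRecordBuilder;

// Illustrative only: read the NumOps counters derived from the @Metric
// field names registered above ("NameNodeActivity" is the NameNode source).
public class EdekMetricsReadSketch {
  static long generateEdekOps() {
    MetricsRecordBuilder rb = getMetrics("NameNodeActivity");
    return getLongCounter("GenerateEDEKTimeNumOps", rb);
  }

  static long warmUpEdekOps() {
    MetricsRecordBuilder rb = getMetrics("NameNodeActivity");
    return getLongCounter("WarmUpEDEKTimeNumOps", rb);
  }
}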

View File

@@ -17,6 +17,12 @@
*/
package org.apache.hadoop.hdfs.server.namenode.metrics;
import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.FileSystemTestWrapper;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
import static org.apache.hadoop.test.MetricsAsserts.assertQuantileGauges;
@@ -25,7 +31,10 @@
import static org.junit.Assert.assertTrue;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.EnumSet;
import java.util.Random;
import com.google.common.collect.ImmutableList;
@@ -621,4 +630,54 @@ public void testNumActiveClientsAndFilesUnderConstructionMetrics()
fs1.close();
}
}
@Test
public void testGenerateEDEKTime() throws IOException,
NoSuchAlgorithmException {
//Create new MiniDFSCluster with EncryptionZone configurations
Configuration conf = new HdfsConfiguration();
FileSystemTestHelper fsHelper = new FileSystemTestHelper();
// Set up java key store
String testRoot = fsHelper.getTestRootDir();
File testRootDir = new File(testRoot).getAbsoluteFile();
conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
JavaKeyStoreProvider.SCHEME_NAME + "://file" +
new Path(testRootDir.toString(), "test.jks").toUri());
conf.setBoolean(DFSConfigKeys
.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
2);
try (MiniDFSCluster clusterEDEK = new MiniDFSCluster.Builder(conf)
.numDataNodes(1).build()) {
DistributedFileSystem fsEDEK =
clusterEDEK.getFileSystem();
FileSystemTestWrapper fsWrapper = new FileSystemTestWrapper(
fsEDEK);
HdfsAdmin dfsAdmin = new HdfsAdmin(clusterEDEK.getURI(),
conf);
fsEDEK.getClient().setKeyProvider(
clusterEDEK.getNameNode().getNamesystem()
.getProvider());
String testKey = "test_key";
DFSTestUtil.createKey(testKey, clusterEDEK, conf);
final Path zoneParent = new Path("/zones");
final Path zone1 = new Path(zoneParent, "zone1");
fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);
dfsAdmin.createEncryptionZone(zone1, "test_key", EnumSet.of(
CreateEncryptionZoneFlag.NO_TRASH));
MetricsRecordBuilder rb = getMetrics(NN_METRICS);
for (int i = 0; i < 3; i++) {
Path filePath = new Path("/zones/zone1/testfile-" + i);
DFSTestUtil.createFile(fsEDEK, filePath, 1024, (short) 3, 1L);
assertQuantileGauges("GenerateEDEKTime1s", rb);
}
}
}
}
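
A closing note, not part of the commit: the per-interval quantile gauges (for example GenerateEDEKTime1s asserted above) are only registered for intervals listed in dfs.metrics.percentiles.intervals, as the constructor change shows. A minimal sketch of a configuration that enables a 1-second window, assuming a MiniDFSCluster-style test setup:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Illustrative only: request 1-second percentile windows for the NameNode
// metrics so that gauges such as GenerateEDEKTime1s are created.
public class PercentileIntervalSketch {
  static Configuration withOneSecondQuantiles() {
    Configuration conf = new HdfsConfiguration();
    conf.set(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY, "1");
    return conf;
  }
}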