diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java
index 2319741c2e3..d09623813be 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.protobuf.InvalidProtocolBufferException;
+import static org.apache.hadoop.util.Time.monotonicNow;
 
 /**
  * Helper class to perform encryption zone operation.
@@ -76,11 +77,14 @@ final class FSDirEncryptionZoneOp {
       return null;
     }
     EncryptedKeyVersion edek = null;
+    long generateEDEKStartTime = monotonicNow();
     try {
       edek = fsd.getProvider().generateEncryptedKey(ezKeyName);
     } catch (GeneralSecurityException e) {
       throw new IOException(e);
     }
+    long generateEDEKTime = monotonicNow() - generateEDEKStartTime;
+    NameNode.getNameNodeMetrics().addGenerateEDEKTime(generateEDEKTime);
     Preconditions.checkNotNull(edek);
     return edek;
   }
@@ -355,6 +359,7 @@ final class FSDirEncryptionZoneOp {
     int sinceLastLog = logCoolDown; // always print the first failure
     boolean success = false;
     IOException lastSeenIOE = null;
+    long warmUpEDEKStartTime = monotonicNow();
     while (true) {
       try {
         kp.warmUpEncryptedKeys(keyNames);
@@ -382,6 +387,8 @@ final class FSDirEncryptionZoneOp {
       }
       sinceLastLog += retryInterval;
     }
+    long warmUpEDEKTime = monotonicNow() - warmUpEDEKStartTime;
+    NameNode.getNameNodeMetrics().addWarmUpEDEKTime(warmUpEDEKTime);
     if (!success) {
       NameNode.LOG.warn("Unable to warm up EDEKs.");
       if (lastSeenIOE != null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
index 54b5c6ebf4d..e214451a4bf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
@@ -115,6 +115,10 @@ public class NameNodeMetrics {
   final MutableQuantiles[] blockReportQuantiles;
   @Metric("Cache report") MutableRate cacheReport;
   final MutableQuantiles[] cacheReportQuantiles;
+  @Metric("Generate EDEK time") private MutableRate generateEDEKTime;
+  private final MutableQuantiles[] generateEDEKTimeQuantiles;
+  @Metric("Warm-up EDEK time") private MutableRate warmUpEDEKTime;
+  private final MutableQuantiles[] warmUpEDEKTimeQuantiles;
 
   @Metric("Duration in SafeMode at startup in msec")
   MutableGaugeInt safeModeTime;
@@ -139,6 +143,8 @@ public class NameNodeMetrics {
     syncsQuantiles = new MutableQuantiles[len];
     blockReportQuantiles = new MutableQuantiles[len];
     cacheReportQuantiles = new MutableQuantiles[len];
+    generateEDEKTimeQuantiles = new MutableQuantiles[len];
+    warmUpEDEKTimeQuantiles = new MutableQuantiles[len];
 
     for (int i = 0; i < len; i++) {
       int interval = intervals[i];
@@ -151,6 +157,12 @@ public class NameNodeMetrics {
       cacheReportQuantiles[i] = registry.newQuantiles(
           "cacheReport" + interval + "s",
           "Cache report", "ops", "latency", interval);
+      generateEDEKTimeQuantiles[i] = registry.newQuantiles(
+          "generateEDEKTime" + interval + "s",
+          "Generate EDEK time", "ops", "latency", interval);
+      warmUpEDEKTimeQuantiles[i] = registry.newQuantiles(
+          "warmupEDEKTime" + interval + "s",
+          "Warm up EDEK time", "ops", "latency", interval);
     }
   }
 
@@ -327,4 +339,18 @@ public class NameNodeMetrics {
   public void addPutImage(long latency) {
     putImage.add(latency);
   }
+
+  public void addGenerateEDEKTime(long latency) {
+    generateEDEKTime.add(latency);
+    for (MutableQuantiles q : generateEDEKTimeQuantiles) {
+      q.add(latency);
+    }
+  }
+
+  public void addWarmUpEDEKTime(long latency) {
+    warmUpEDEKTime.add(latency);
+    for (MutableQuantiles q : warmUpEDEKTimeQuantiles) {
+      q.add(latency);
+    }
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index e21b00d7dab..7fbcdb6a914 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
@@ -17,6 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.metrics;
 
+import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
+import org.apache.hadoop.fs.FileSystemTestHelper;
+import org.apache.hadoop.fs.FileSystemTestWrapper;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
+import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
 import static org.apache.hadoop.test.MetricsAsserts.assertQuantileGauges;
@@ -25,7 +31,10 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.DataInputStream;
+import java.io.File;
 import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.util.EnumSet;
 import java.util.Random;
 
 import com.google.common.collect.ImmutableList;
@@ -622,4 +631,54 @@ public class TestNameNodeMetrics {
       fs1.close();
     }
   }
+
+  @Test
+  public void testGenerateEDEKTime() throws IOException,
+      NoSuchAlgorithmException {
+    //Create new MiniDFSCluster with EncryptionZone configurations
+    Configuration conf = new HdfsConfiguration();
+    FileSystemTestHelper fsHelper = new FileSystemTestHelper();
+    // Set up java key store
+    String testRoot = fsHelper.getTestRootDir();
+    File testRootDir = new File(testRoot).getAbsoluteFile();
+    conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
+        JavaKeyStoreProvider.SCHEME_NAME + "://file" +
+        new Path(testRootDir.toString(), "test.jks").toUri());
+    conf.setBoolean(DFSConfigKeys
+        .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
+        2);
+
+    try (MiniDFSCluster clusterEDEK = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(1).build()) {
+
+      DistributedFileSystem fsEDEK =
+          clusterEDEK.getFileSystem();
+      FileSystemTestWrapper fsWrapper = new FileSystemTestWrapper(
+          fsEDEK);
+      HdfsAdmin dfsAdmin = new HdfsAdmin(clusterEDEK.getURI(),
+          conf);
+      fsEDEK.getClient().setKeyProvider(
+          clusterEDEK.getNameNode().getNamesystem()
+              .getProvider());
+
+      String testKey = "test_key";
+      DFSTestUtil.createKey(testKey, clusterEDEK, conf);
+
+      final Path zoneParent = new Path("/zones");
+      final Path zone1 = new Path(zoneParent, "zone1");
+      fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);
+      dfsAdmin.createEncryptionZone(zone1, "test_key", EnumSet.of(
+          CreateEncryptionZoneFlag.NO_TRASH));
+
+      MetricsRecordBuilder rb = getMetrics(NN_METRICS);
+
+      for (int i = 0; i < 3; i++) {
+        Path filePath = new Path("/zones/zone1/testfile-" + i);
+        DFSTestUtil.createFile(fsEDEK, filePath, 1024, (short) 3, 1L);
+
+        assertQuantileGauges("GenerateEDEKTime1s", rb);
+      }
+    }
+  }
 }
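
Note: the new test asserts only the GenerateEDEKTime quantiles; the WarmUpEDEKTime rate added in NameNodeMetrics is not covered. Below is a minimal sketch of how a follow-up assertion could look, assuming the "NameNodeActivity" metrics record (the NN_METRICS constant used in TestNameNodeMetrics) and the standard metrics2 MutableRate naming, under which a rate field named warmUpEDEKTime is published as WarmUpEDEKTimeNumOps/WarmUpEDEKTimeAvgTime. The helper class and method names are illustrative only, and the check is meaningful only after the warm-up path has actually run (for example, on a NameNode restarted with existing encryption zones).

// Illustrative follow-up check (not part of the patch above): verify that the
// NameNode recorded at least one warm-up EDEK sample.
import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.junit.Assert;

public class WarmUpEDEKMetricCheck {
  public static void assertWarmUpEDEKRecorded() {
    // "NameNodeActivity" is the record name behind NN_METRICS in
    // TestNameNodeMetrics; the MutableRate field "warmUpEDEKTime" is published
    // as "WarmUpEDEKTimeNumOps" (counter) and "WarmUpEDEKTimeAvgTime" (gauge).
    MetricsRecordBuilder rb = getMetrics("NameNodeActivity");
    Assert.assertTrue("expected at least one warm-up EDEK sample",
        getLongCounter("WarmUpEDEKTimeNumOps", rb) >= 1);
  }
}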