HDFS-10676. Add namenode metric to measure time spent in generating EDEKs. Contributed by Hanisha Koneru.

This commit is contained in:
Xiaoyu Yao 2016-07-28 16:02:06 -07:00
parent 4e756d7271
commit ce3d68e9c3
3 changed files with 92 additions and 0 deletions

View File

@ -49,6 +49,7 @@ import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.InvalidProtocolBufferException;
import static org.apache.hadoop.util.Time.monotonicNow;
/** /**
* Helper class to perform encryption zone operation. * Helper class to perform encryption zone operation.
@ -76,11 +77,14 @@ final class FSDirEncryptionZoneOp {
return null; return null;
} }
EncryptedKeyVersion edek = null; EncryptedKeyVersion edek = null;
long generateEDEKStartTime = monotonicNow();
try { try {
edek = fsd.getProvider().generateEncryptedKey(ezKeyName); edek = fsd.getProvider().generateEncryptedKey(ezKeyName);
} catch (GeneralSecurityException e) { } catch (GeneralSecurityException e) {
throw new IOException(e); throw new IOException(e);
} }
long generateEDEKTime = monotonicNow() - generateEDEKStartTime;
NameNode.getNameNodeMetrics().addGenerateEDEKTime(generateEDEKTime);
Preconditions.checkNotNull(edek); Preconditions.checkNotNull(edek);
return edek; return edek;
} }
@ -355,6 +359,7 @@ final class FSDirEncryptionZoneOp {
int sinceLastLog = logCoolDown; // always print the first failure int sinceLastLog = logCoolDown; // always print the first failure
boolean success = false; boolean success = false;
IOException lastSeenIOE = null; IOException lastSeenIOE = null;
long warmUpEDEKStartTime = monotonicNow();
while (true) { while (true) {
try { try {
kp.warmUpEncryptedKeys(keyNames); kp.warmUpEncryptedKeys(keyNames);
@ -382,6 +387,8 @@ final class FSDirEncryptionZoneOp {
} }
sinceLastLog += retryInterval; sinceLastLog += retryInterval;
} }
long warmUpEDEKTime = monotonicNow() - warmUpEDEKStartTime;
NameNode.getNameNodeMetrics().addWarmUpEDEKTime(warmUpEDEKTime);
if (!success) { if (!success) {
NameNode.LOG.warn("Unable to warm up EDEKs."); NameNode.LOG.warn("Unable to warm up EDEKs.");
if (lastSeenIOE != null) { if (lastSeenIOE != null) {

View File

@ -115,6 +115,10 @@ public class NameNodeMetrics {
final MutableQuantiles[] blockReportQuantiles; final MutableQuantiles[] blockReportQuantiles;
@Metric("Cache report") MutableRate cacheReport; @Metric("Cache report") MutableRate cacheReport;
final MutableQuantiles[] cacheReportQuantiles; final MutableQuantiles[] cacheReportQuantiles;
@Metric("Generate EDEK time") private MutableRate generateEDEKTime;
private final MutableQuantiles[] generateEDEKTimeQuantiles;
@Metric("Warm-up EDEK time") private MutableRate warmUpEDEKTime;
private final MutableQuantiles[] warmUpEDEKTimeQuantiles;
@Metric("Duration in SafeMode at startup in msec") @Metric("Duration in SafeMode at startup in msec")
MutableGaugeInt safeModeTime; MutableGaugeInt safeModeTime;
@ -139,6 +143,8 @@ public class NameNodeMetrics {
syncsQuantiles = new MutableQuantiles[len]; syncsQuantiles = new MutableQuantiles[len];
blockReportQuantiles = new MutableQuantiles[len]; blockReportQuantiles = new MutableQuantiles[len];
cacheReportQuantiles = new MutableQuantiles[len]; cacheReportQuantiles = new MutableQuantiles[len];
generateEDEKTimeQuantiles = new MutableQuantiles[len];
warmUpEDEKTimeQuantiles = new MutableQuantiles[len];
for (int i = 0; i < len; i++) { for (int i = 0; i < len; i++) {
int interval = intervals[i]; int interval = intervals[i];
@ -151,6 +157,12 @@ public class NameNodeMetrics {
cacheReportQuantiles[i] = registry.newQuantiles( cacheReportQuantiles[i] = registry.newQuantiles(
"cacheReport" + interval + "s", "cacheReport" + interval + "s",
"Cache report", "ops", "latency", interval); "Cache report", "ops", "latency", interval);
generateEDEKTimeQuantiles[i] = registry.newQuantiles(
"generateEDEKTime" + interval + "s",
"Generate EDEK time", "ops", "latency", interval);
warmUpEDEKTimeQuantiles[i] = registry.newQuantiles(
"warmupEDEKTime" + interval + "s",
"Warm up EDEK time", "ops", "latency", interval);
} }
} }
@ -327,4 +339,18 @@ public class NameNodeMetrics {
public void addPutImage(long latency) { public void addPutImage(long latency) {
putImage.add(latency); putImage.add(latency);
} }
/**
 * Records one EDEK-generation latency sample into the rate metric and
 * into every configured quantile window.
 *
 * @param latency time spent generating the EDEK, in milliseconds
 */
public void addGenerateEDEKTime(long latency) {
  generateEDEKTime.add(latency);
  for (int i = 0; i < generateEDEKTimeQuantiles.length; i++) {
    generateEDEKTimeQuantiles[i].add(latency);
  }
}
/**
 * Records one EDEK warm-up latency sample into the rate metric and
 * into every configured quantile window.
 *
 * @param latency time spent warming up EDEKs, in milliseconds
 */
public void addWarmUpEDEKTime(long latency) {
  warmUpEDEKTime.add(latency);
  for (int i = 0; i < warmUpEDEKTimeQuantiles.length; i++) {
    warmUpEDEKTimeQuantiles[i].add(latency);
  }
}
} }

View File

@ -17,6 +17,12 @@
*/ */
package org.apache.hadoop.hdfs.server.namenode.metrics; package org.apache.hadoop.hdfs.server.namenode.metrics;
import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.FileSystemTestWrapper;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.assertGauge; import static org.apache.hadoop.test.MetricsAsserts.assertGauge;
import static org.apache.hadoop.test.MetricsAsserts.assertQuantileGauges; import static org.apache.hadoop.test.MetricsAsserts.assertQuantileGauges;
@ -25,7 +31,10 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import java.io.DataInputStream; import java.io.DataInputStream;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.EnumSet;
import java.util.Random; import java.util.Random;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -621,4 +630,54 @@ public class TestNameNodeMetrics {
fs1.close(); fs1.close();
} }
} }
/**
 * Regression test for HDFS-10676: verifies that the NameNode publishes
 * quantile metrics for time spent generating EDEKs. Spins up a
 * single-DataNode cluster backed by a JavaKeyStoreProvider, creates an
 * encryption zone, writes a few files into it (each write forces an EDEK
 * to be generated), and asserts the "GenerateEDEKTime1s" quantile gauges
 * are present on the NameNode metrics record.
 */
@Test
public void testGenerateEDEKTime() throws IOException,
    NoSuchAlgorithmException {
  // Create new MiniDFSCluster with EncryptionZone configurations
  Configuration conf = new HdfsConfiguration();
  FileSystemTestHelper fsHelper = new FileSystemTestHelper();
  // Set up java key store
  String testRoot = fsHelper.getTestRootDir();
  File testRootDir = new File(testRoot).getAbsoluteFile();
  conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
      JavaKeyStoreProvider.SCHEME_NAME + "://file" +
      new Path(testRootDir.toString(), "test.jks").toUri());
  conf.setBoolean(DFSConfigKeys
      .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
  conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
      2);

  // try-with-resources shuts the cluster down even if an assertion fails.
  try (MiniDFSCluster clusterEDEK = new MiniDFSCluster.Builder(conf)
      .numDataNodes(1).build()) {

    DistributedFileSystem fsEDEK =
        clusterEDEK.getFileSystem();
    FileSystemTestWrapper fsWrapper = new FileSystemTestWrapper(
        fsEDEK);
    HdfsAdmin dfsAdmin = new HdfsAdmin(clusterEDEK.getURI(),
        conf);
    fsEDEK.getClient().setKeyProvider(
        clusterEDEK.getNameNode().getNamesystem()
            .getProvider());

    String testKey = "test_key";
    DFSTestUtil.createKey(testKey, clusterEDEK, conf);

    final Path zoneParent = new Path("/zones");
    final Path zone1 = new Path(zoneParent, "zone1");
    fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);
    // Fix: use the testKey variable instead of repeating the "test_key"
    // literal, so the zone's key name cannot drift from the key created
    // above if the name is ever changed.
    dfsAdmin.createEncryptionZone(zone1, testKey, EnumSet.of(
        CreateEncryptionZoneFlag.NO_TRASH));

    MetricsRecordBuilder rb = getMetrics(NN_METRICS);
    for (int i = 0; i < 3; i++) {
      Path filePath = new Path("/zones/zone1/testfile-" + i);
      // Each file created inside the zone generates an EDEK on the NN.
      DFSTestUtil.createFile(fsEDEK, filePath, 1024, (short) 3, 1L);

      assertQuantileGauges("GenerateEDEKTime1s", rb);
    }
  }
}
} }