HDFS-7067. ClassCastException while using a key created by keytool to create encryption zone. (Charles Lamb via Colin P. McCabe)

Colin Patrick Mccabe 2015-01-14 17:42:59 -08:00
parent 6464a8929a
commit a5a033c7ca
3 changed files with 52 additions and 1 deletion
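For context, the ClassCastException arises because keytool stores ordinary SecretKey entries in a JCEKS keystore, while JavaKeyStoreProvider expects entries wrapped in its own private KeyMetadata class. Below is a minimal, self-contained Java sketch of the kind of entry keytool produces; the file name, alias, and key material are illustrative, not taken from this commit.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.security.Key;
import java.security.KeyStore;
import javax.crypto.spec.SecretKeySpec;

public class KeytoolStyleEntryDemo {
  public static void main(String[] args) throws Exception {
    final char[] password = "none".toCharArray();

    // Create a JCEKS keystore holding a plain SecretKey entry, which is
    // roughly what `keytool -genseckey -storetype jceks` produces.
    KeyStore ks = KeyStore.getInstance("JCEKS");
    ks.load(null, password);
    ks.setKeyEntry("testkey2", new SecretKeySpec(new byte[16], "AES"),
        password, null);
    try (FileOutputStream out = new FileOutputStream("demo.jceks")) {
      ks.store(out, password);
    }

    // Read the entry back the way JavaKeyStoreProvider does. The returned
    // key is a javax.crypto.spec.SecretKeySpec, not the provider's private
    // KeyMetadata wrapper, so the unguarded cast in getMetadata() used to
    // fail with a ClassCastException; this patch converts that failure into
    // an IOException with a descriptive message.
    KeyStore reloaded = KeyStore.getInstance("JCEKS");
    try (FileInputStream in = new FileInputStream("demo.jceks")) {
      reloaded.load(in, password);
    }
    Key key = reloaded.getKey("testkey2", password);
    System.out.println(key.getClass().getName());
  }
}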

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java

@@ -21,7 +21,6 @@
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -401,6 +400,10 @@ public Metadata getMetadata(String name) throws IOException {
       Metadata meta = ((KeyMetadata) keyStore.getKey(name, password)).metadata;
       cache.put(name, meta);
       return meta;
+    } catch (ClassCastException e) {
+      throw new IOException("Can't cast key for " + name + " in keystore " +
+          path + " to a KeyMetadata. Key may have been added using " +
+          " keytool or some other non-Hadoop method.", e);
     } catch (KeyStoreException e) {
       throw new IOException("Can't get metadata for " + name +
           " from keystore " + path, e);

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java

@@ -42,6 +42,7 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 public class TestKeyProviderFactory {
@@ -430,4 +431,51 @@ public void testGetProviderViaURI() throws Exception {
     Assert.assertNull(kp);
   }
+  @Test
+  public void testJksProviderWithKeytoolKeys() throws Exception {
+    final Configuration conf = new Configuration();
+    final String keystoreDirAbsolutePath =
+        conf.getResource("hdfs7067.keystore").getPath();
+    final String ourUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file@/" +
+        keystoreDirAbsolutePath;
+    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl);
+    final KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
+    // Sanity check that we are using the right keystore
+    @SuppressWarnings("unused")
+    final KeyProvider.KeyVersion keyVersion =
+        provider.getKeyVersion("testkey5@0");
+    try {
+      @SuppressWarnings("unused")
+      final KeyProvider.KeyVersion keyVersionWrongKeyNameFormat =
+          provider.getKeyVersion("testkey2");
+      fail("should have thrown an exception");
+    } catch (IOException e) {
+      // No version in key path testkey2/
+      GenericTestUtils.assertExceptionContains("No version in key path", e);
+    }
+    try {
+      @SuppressWarnings("unused")
+      final KeyProvider.KeyVersion keyVersionCurrentKeyNotWrongKeyNameFormat =
+          provider.getCurrentKey("testkey5@0");
+      fail("should have thrown an exception getting testkey5@0");
+    } catch (IOException e) {
+      // javax.crypto.spec.SecretKeySpec cannot be cast to
+      // org.apache.hadoop.crypto.key.JavaKeyStoreProvider$KeyMetadata
+      GenericTestUtils.assertExceptionContains("other non-Hadoop method", e);
+    }
+    try {
+      @SuppressWarnings("unused")
+      KeyProvider.KeyVersion keyVersionCurrentKeyNotReally =
+          provider.getCurrentKey("testkey2");
+      fail("should have thrown an exception getting testkey2");
+    } catch (IOException e) {
+      // javax.crypto.spec.SecretKeySpec cannot be cast to
+      // org.apache.hadoop.crypto.key.JavaKeyStoreProvider$KeyMetadata
+      GenericTestUtils.assertExceptionContains("other non-Hadoop method", e);
+    }
+  }
 }
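For reference, a hedged sketch of what a caller sees once this patch is in place: instead of an unchecked ClassCastException, getMetadata() surfaces an IOException naming the offending entry. The keystore path, key name, and class name below are illustrative, not part of the commit.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class KeytoolKeyCallerDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Point the provider at a JCEKS file that contains a keytool-created key.
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
        JavaKeyStoreProvider.SCHEME_NAME + "://file@/tmp/demo.jceks");
    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
    try {
      provider.getMetadata("testkey2");
    } catch (IOException e) {
      // With this patch the message reads along the lines of:
      // "Can't cast key for testkey2 in keystore ... to a KeyMetadata.
      //  Key may have been added using keytool or some other non-Hadoop method."
      System.out.println(e.getMessage());
    }
  }
}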