HADOOP-15059. Undoing the switch of Credentials to PB format as default - done via HADOOP-12563 for supporting 2.x to 3.x upgrades.

This commit is contained in:
Vinod Kumar Vavilapalli (I am also known as @tshooter.) 2017-12-08 08:00:21 -08:00
parent ce04340ec7
commit f19638333b
4 changed files with 87 additions and 55 deletions

View File

@ -27,7 +27,6 @@ import java.io.DataOutput;
import java.io.DataOutputStream; import java.io.DataOutputStream;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
@ -60,6 +59,28 @@ import org.slf4j.LoggerFactory;
@InterfaceAudience.Public @InterfaceAudience.Public
@InterfaceStability.Evolving @InterfaceStability.Evolving
public class Credentials implements Writable { public class Credentials implements Writable {
/**
 * On-disk layouts a credentials stream can be serialized in.  The byte
 * value is written after the storage magic so readers can select the
 * matching parser.
 */
public enum SerializedFormat {
  WRITABLE((byte) 0x00),
  PROTOBUF((byte) 0x01);

  // Cached copy of values() so lookups do not clone the array each call.
  private static final SerializedFormat[] FORMATS = values();

  final byte value;

  SerializedFormat(byte val) {
    this.value = val;
  }

  /**
   * Maps a raw on-disk format byte back to its enum constant.
   *
   * @param val format byte read from the stream, widened to int
   * @return the matching serialized format
   * @throws IllegalArgumentException if val matches no known format
   */
  public static SerializedFormat valueOf(int val) {
    if (val < 0 || val >= FORMATS.length) {
      throw new IllegalArgumentException("Unknown credential format: " + val);
    }
    return FORMATS[val];
  }
}
private static final Logger LOG = LoggerFactory.getLogger(Credentials.class); private static final Logger LOG = LoggerFactory.getLogger(Credentials.class);
private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>(); private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>();
@ -224,63 +245,74 @@ public class Credentials implements Writable {
if (!Arrays.equals(magic, TOKEN_STORAGE_MAGIC)) { if (!Arrays.equals(magic, TOKEN_STORAGE_MAGIC)) {
throw new IOException("Bad header found in token storage."); throw new IOException("Bad header found in token storage.");
} }
byte version = in.readByte(); SerializedFormat format;
if (version != TOKEN_STORAGE_VERSION && try {
version != OLD_TOKEN_STORAGE_VERSION) { format = SerializedFormat.valueOf(in.readByte());
throw new IOException("Unknown version " + version + } catch (IllegalArgumentException e) {
" in token storage."); throw new IOException(e);
} }
if (version == OLD_TOKEN_STORAGE_VERSION) { switch (format) {
case WRITABLE:
readFields(in); readFields(in);
} else if (version == TOKEN_STORAGE_VERSION) { break;
case PROTOBUF:
readProto(in); readProto(in);
break;
default:
throw new IOException("Unsupported format " + format);
} }
} }
private static final byte[] TOKEN_STORAGE_MAGIC = private static final byte[] TOKEN_STORAGE_MAGIC =
"HDTS".getBytes(StandardCharsets.UTF_8); "HDTS".getBytes(StandardCharsets.UTF_8);
private static final byte TOKEN_STORAGE_VERSION = 1;
/**
* For backward compatibility.
*/
private static final byte OLD_TOKEN_STORAGE_VERSION = 0;
public void writeTokenStorageToStream(DataOutputStream os) public void writeTokenStorageToStream(DataOutputStream os)
throws IOException { throws IOException {
// by default store in the oldest supported format for compatibility
writeTokenStorageToStream(os, SerializedFormat.WRITABLE);
}
/**
 * Serializes this credentials object to the given stream in the
 * requested on-disk format.  The stream is not closed by this method.
 *
 * @param os stream to write the serialized credentials to
 * @param format which serialized layout to emit
 * @throws IOException on a write failure
 * @throws IllegalArgumentException if the format is not recognized
 */
public void writeTokenStorageToStream(DataOutputStream os,
    SerializedFormat format) throws IOException {
  // switch (rather than if/else) also preserves the NPE a null format
  // produces when the selector is evaluated
  switch (format) {
  case PROTOBUF:
    writeProtobufOutputStream(os);
    break;
  case WRITABLE:
    writeWritableOutputStream(os);
    break;
  default:
    throw new IllegalArgumentException(
        "Unsupported serialized format: " + format);
  }
}
private void writeWritableOutputStream(DataOutputStream os)
throws IOException {
os.write(TOKEN_STORAGE_MAGIC); os.write(TOKEN_STORAGE_MAGIC);
os.write(TOKEN_STORAGE_VERSION); os.write(SerializedFormat.WRITABLE.value);
write(os);
}
private void writeProtobufOutputStream(DataOutputStream os)
throws IOException {
os.write(TOKEN_STORAGE_MAGIC);
os.write(SerializedFormat.PROTOBUF.value);
writeProto(os); writeProto(os);
} }
public void writeTokenStorageFile(Path filename, public void writeTokenStorageFile(Path filename,
Configuration conf) throws IOException { Configuration conf) throws IOException {
FSDataOutputStream os = filename.getFileSystem(conf).create(filename); // by default store in the oldest supported format for compatibility
writeTokenStorageToStream(os); writeTokenStorageFile(filename, conf, SerializedFormat.WRITABLE);
os.close();
} }
/** public void writeTokenStorageFile(Path filename, Configuration conf,
* For backward compatibility. SerializedFormat format) throws IOException {
*/ try (FSDataOutputStream os =
public void writeLegacyTokenStorageLocalFile(File f) throws IOException { filename.getFileSystem(conf).create(filename)) {
writeLegacyOutputStream(new DataOutputStream(new FileOutputStream(f))); writeTokenStorageToStream(os, format);
} }
/**
* For backward compatibility.
*/
public void writeLegacyTokenStorageFile(Path filename, Configuration conf)
throws IOException {
writeLegacyOutputStream(filename.getFileSystem(conf).create(filename));
}
private void writeLegacyOutputStream(DataOutputStream os) throws IOException {
os.write(TOKEN_STORAGE_MAGIC);
os.write(OLD_TOKEN_STORAGE_VERSION);
write(os);
os.close();
} }
/** /**
@ -312,7 +344,7 @@ public class Credentials implements Writable {
* @param out * @param out
* @throws IOException * @throws IOException
*/ */
public void writeProto(DataOutput out) throws IOException { void writeProto(DataOutput out) throws IOException {
CredentialsProto.Builder storage = CredentialsProto.newBuilder(); CredentialsProto.Builder storage = CredentialsProto.newBuilder();
for (Map.Entry<Text, Token<? extends TokenIdentifier>> e : for (Map.Entry<Text, Token<? extends TokenIdentifier>> e :
tokenMap.entrySet()) { tokenMap.entrySet()) {
@ -337,7 +369,7 @@ public class Credentials implements Writable {
* Populates keys/values from proto buffer storage. * Populates keys/values from proto buffer storage.
* @param in - stream ready to read a serialized proto buffer message * @param in - stream ready to read a serialized proto buffer message
*/ */
public void readProto(DataInput in) throws IOException { void readProto(DataInput in) throws IOException {
CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in); CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in);
for (CredentialsKVProto kv : storage.getTokensList()) { for (CredentialsKVProto kv : storage.getTokensList()) {
addToken(new Text(kv.getAliasBytes().toByteArray()), addToken(new Text(kv.getAliasBytes().toByteArray()),

View File

@ -102,11 +102,13 @@ public final class DtFileOperations {
public static void doFormattedWrite( public static void doFormattedWrite(
File f, String format, Credentials creds, Configuration conf) File f, String format, Credentials creds, Configuration conf)
throws IOException { throws IOException {
if (format == null || format.equals(FORMAT_PB)) { // default to oldest supported format for compatibility
creds.writeTokenStorageFile(fileToPath(f), conf); Credentials.SerializedFormat credsFormat =
} else { // if (format != null && format.equals(FORMAT_JAVA)) { Credentials.SerializedFormat.WRITABLE;
creds.writeLegacyTokenStorageLocalFile(f); if (format.equals(FORMAT_PB)) {
credsFormat = Credentials.SerializedFormat.PROTOBUF;
} }
creds.writeTokenStorageFile(fileToPath(f), conf, credsFormat);
} }
/** Print out a Credentials file from the local filesystem. /** Print out a Credentials file from the local filesystem.

View File

@ -20,7 +20,6 @@ package org.apache.hadoop.security.token;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.DataInputStream; import java.io.DataInputStream;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream; import java.io.PrintStream;
@ -29,14 +28,12 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.security.token.DtFetcher;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -54,7 +51,6 @@ public class TestDtUtilShell {
private static Configuration defaultConf = new Configuration(); private static Configuration defaultConf = new Configuration();
private static FileSystem localFs = null; private static FileSystem localFs = null;
private final String alias = "proxy_ip:1234"; private final String alias = "proxy_ip:1234";
private final String renewer = "yarn";
private final String getUrl = SERVICE_GET.toString() + "://localhost:9000/"; private final String getUrl = SERVICE_GET.toString() + "://localhost:9000/";
private final String getUrl2 = "http://localhost:9000/"; private final String getUrl2 = "http://localhost:9000/";
public static Text SERVICE_GET = new Text("testTokenServiceGet"); public static Text SERVICE_GET = new Text("testTokenServiceGet");
@ -111,11 +107,12 @@ public class TestDtUtilShell {
Token<? extends TokenIdentifier> tok = (Token<? extends TokenIdentifier>) Token<? extends TokenIdentifier> tok = (Token<? extends TokenIdentifier>)
new Token(IDENTIFIER, PASSWORD, KIND, service); new Token(IDENTIFIER, PASSWORD, KIND, service);
creds.addToken(tok.getService(), tok); creds.addToken(tok.getService(), tok);
Credentials.SerializedFormat format =
Credentials.SerializedFormat.PROTOBUF;
if (legacy) { if (legacy) {
creds.writeLegacyTokenStorageLocalFile(new File(tokenPath.toString())); format = Credentials.SerializedFormat.WRITABLE;
} else {
creds.writeTokenStorageFile(tokenPath, defaultConf);
} }
creds.writeTokenStorageFile(tokenPath, defaultConf, format);
} }
@Test @Test
@ -284,6 +281,6 @@ public class TestDtUtilShell {
DataInputStream in = new DataInputStream( DataInputStream in = new DataInputStream(
new FileInputStream(tokenFilenameGet)); new FileInputStream(tokenFilenameGet));
spyCreds.readTokenStorageStream(in); spyCreds.readTokenStorageStream(in);
Mockito.verify(spyCreds).readProto(in); Mockito.verify(spyCreds, Mockito.never()).readFields(in);
} }
} }

View File

@ -182,7 +182,8 @@ public class DelegationTokenFetcher {
Credentials cred = new Credentials(); Credentials cred = new Credentials();
cred.addToken(token.getService(), token); cred.addToken(token.getService(), token);
// dtutil is replacing this tool; preserve legacy functionality // dtutil is replacing this tool; preserve legacy functionality
cred.writeLegacyTokenStorageFile(tokenFile, conf); cred.writeTokenStorageFile(tokenFile, conf,
Credentials.SerializedFormat.WRITABLE);
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("Fetched token " + fs.getUri() + " for " + LOG.debug("Fetched token " + fs.getUri() + " for " +