HADOOP-13494. ReconfigurableBase can log sensitive information. Contributed by Sean Mackrory.

Andrew Wang, 2016-08-16 15:01:18 -07:00
commit 4b689e7a75 (parent 6c154abd33)
5 changed files with 190 additions and 3 deletions

org/apache/hadoop/conf/ConfigRedactor.java (new file)

@@ -0,0 +1,84 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.conf;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

import static org.apache.hadoop.fs.CommonConfigurationKeys.*;

/**
 * Tool for redacting sensitive information when displaying config parameters.
 *
 * <p>Some config parameters contain sensitive information (for example, cloud
 * storage keys). When these properties are displayed in plaintext, we should
 * redact their values as appropriate.
 */
public class ConfigRedactor {

  private static final String REDACTED_TEXT = "<redacted>";

  private List<Pattern> compiledPatterns;

  public ConfigRedactor(Configuration conf) {
    String sensitiveRegexList = conf.get(
        HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS,
        HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS_DEFAULT);
    List<String> sensitiveRegexes =
        Arrays.asList(sensitiveRegexList.split(","));
    compiledPatterns = new ArrayList<Pattern>();
    for (String regex : sensitiveRegexes) {
      Pattern p = Pattern.compile(regex);
      compiledPatterns.add(p);
    }
  }
  /**
   * Given a key / value pair, decides whether or not to redact and returns
   * either the original value or text indicating it has been redacted.
   *
   * @param key Configuration key being considered.
   * @param value Value associated with the key.
   * @return Original value, or text indicating it has been redacted.
   */
  public String redact(String key, String value) {
    if (configIsSensitive(key)) {
      return REDACTED_TEXT;
    }
    return value;
  }
  /**
   * Matches given config key against patterns and determines whether or not
   * it should be considered sensitive enough to redact in logs and other
   * plaintext displays.
   *
   * @param key Configuration key being considered.
   * @return True if the parameter is considered sensitive.
   */
  private boolean configIsSensitive(String key) {
    for (Pattern regex : compiledPatterns) {
      if (regex.matcher(key).find()) {
        return true;
      }
    }
    return false;
  }
}
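As a quick illustration (not part of the change itself), here is a minimal sketch of how a caller might run values through the redactor before logging them. The class name, sample values, and the choice of keys are illustrative only; the keys are borrowed from the test added below.

import org.apache.hadoop.conf.ConfigRedactor;
import org.apache.hadoop.conf.Configuration;

public class RedactorUsageSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    ConfigRedactor redactor = new ConfigRedactor(conf);

    // Matches the default "fs.s3.*[Ss]ecret.?[Kk]ey" pattern, so the value is hidden.
    System.out.println(redactor.redact("fs.s3a.secret.key", "SuperSecret"));  // <redacted>

    // Matches no default pattern, so the value passes through unchanged.
    System.out.println(redactor.redact("fs.defaultFS", "hdfs://nn:8020"));    // hdfs://nn:8020
  }
}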

org/apache/hadoop/conf/ReconfigurableBase.java

@@ -117,17 +117,24 @@ public abstract class ReconfigurableBase
       final Collection<PropertyChange> changes =
           parent.getChangedProperties(newConf, oldConf);
       Map<PropertyChange, Optional<String>> results = Maps.newHashMap();
+      ConfigRedactor oldRedactor = new ConfigRedactor(oldConf);
+      ConfigRedactor newRedactor = new ConfigRedactor(newConf);
       for (PropertyChange change : changes) {
         String errorMessage = null;
+        String oldValRedacted = oldRedactor.redact(change.prop, change.oldVal);
+        String newValRedacted = newRedactor.redact(change.prop, change.newVal);
         if (!parent.isPropertyReconfigurable(change.prop)) {
           LOG.info(String.format(
               "Property %s is not configurable: old value: %s, new value: %s",
-              change.prop, change.oldVal, change.newVal));
+              change.prop,
+              oldValRedacted,
+              newValRedacted));
           continue;
         }
         LOG.info("Change property: " + change.prop + " from \""
-            + ((change.oldVal == null) ? "<default>" : change.oldVal)
-            + "\" to \"" + ((change.newVal == null) ? "<default>" : change.newVal)
+            + ((change.oldVal == null) ? "<default>" : oldValRedacted)
+            + "\" to \""
+            + ((change.newVal == null) ? "<default>" : newValRedacted)
             + "\".");
         try {
           String effectiveValue =

org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

@@ -753,5 +753,19 @@ public class CommonConfigurationKeysPublic {
   */
  public static final String HADOOP_SECURITY_CREDENTIAL_PASSWORD_FILE_KEY =
      "hadoop.security.credstore.java-keystore-provider.password-file";

  /**
   * @see
   * <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
   * core-default.xml</a>
   */
  public static final String HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS =
      "hadoop.security.sensitive-config-keys";

  public static final String HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS_DEFAULT =
      "password$" + "," +
      "fs.s3.*[Ss]ecret.?[Kk]ey" + "," +
      "fs.azure\\.account.key.*" + "," +
      "dfs.webhdfs.oauth2.[a-z]+.token" + "," +
      HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS;
}
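To make the matching semantics concrete (again, not part of the commit): ConfigRedactor compiles each comma-separated entry of this default as a java.util.regex.Pattern and tests keys with Matcher.find(), so a pattern only has to match somewhere inside the key unless it is explicitly anchored. A small self-contained sketch, with a made-up class name and key names taken from the test below:

import java.util.regex.Pattern;

public class SensitiveKeyMatchSketch {
  public static void main(String[] args) {
    // One of the default patterns from HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS_DEFAULT.
    Pattern s3Secret = Pattern.compile("fs.s3.*[Ss]ecret.?[Kk]ey");

    // find() matches a substring, so both s3a and s3n style keys are caught.
    System.out.println(s3Secret.matcher("fs.s3a.secret.key").find());    // true
    System.out.println(s3Secret.matcher("fs.s3n.awsSecretKey").find());  // true

    // "password$" is anchored at the end of the key, not the beginning.
    Pattern password = Pattern.compile("password$");
    System.out.println(password.matcher("ssl.server.keystore.password").find());  // true
    System.out.println(password.matcher(
        "hadoop.security.credstore.java-keystore-provider.password-file").find());  // false
  }
}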

core-default.xml

@@ -502,6 +502,16 @@
  </description>
</property>

<property>
  <name>hadoop.security.sensitive-config-keys</name>
  <value>password$,fs.s3.*[Ss]ecret.?[Kk]ey,fs.azure.account.key.*,dfs.webhdfs.oauth2.[a-z]+.token,hadoop.security.sensitive-config-keys</value>
  <description>A comma-separated list of regular expressions to match against
    configuration keys that should be redacted where appropriate; for
    example, when logging modified properties during a reconfiguration,
    private credentials should not be logged.
  </description>
</property>

<property>
  <name>hadoop.workaround.non.threadsafe.getpwuid</name>
  <value>true</value>

org/apache/hadoop/conf/TestConfigRedactor.java (new file)

@@ -0,0 +1,72 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.conf;

import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.List;

/**
 * Tests the tool (and the default expression) for deciding which config
 * keys to redact.
 */
public class TestConfigRedactor {
  private static final String REDACTED_TEXT = "<redacted>";

  private static final String ORIGINAL_VALUE = "Hello, World!";

  @Test
  public void redact() throws Exception {
    Configuration conf = new Configuration();
    ConfigRedactor redactor = new ConfigRedactor(conf);
    String processedText;

    List<String> sensitiveKeys = Arrays.asList(
        "fs.s3a.secret.key",
        "fs.s3n.awsSecretKey",
        "fs.azure.account.key.abcdefg.blob.core.windows.net",
        "dfs.webhdfs.oauth2.access.token",
        "dfs.webhdfs.oauth2.refresh.token",
        "ssl.server.keystore.keypassword",
        "ssl.server.keystore.password",
        "hadoop.security.sensitive-config-keys"
    );
    for (String key : sensitiveKeys) {
      processedText = redactor.redact(key, ORIGINAL_VALUE);
      Assert.assertEquals(
          "Config parameter wasn't redacted and should be: " + key,
          REDACTED_TEXT, processedText);
    }

    List<String> normalKeys = Arrays.asList(
        "fs.defaultFS",
        "dfs.replication",
        "ssl.server.keystore.location",
        "hadoop.security.credstore.java-keystore-provider.password-file"
    );
    for (String key : normalKeys) {
      processedText = redactor.redact(key, ORIGINAL_VALUE);
      Assert.assertEquals(
          "Config parameter was redacted and shouldn't be: " + key,
          ORIGINAL_VALUE, processedText);
    }
  }
}
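Finally, since hadoop.security.sensitive-config-keys is an ordinary comma-separated regex list, a deployment could append its own patterns on top of the shipped default. The sketch below is illustrative only and not part of the change; the extra pattern and the my.app.* key are made-up examples.

import org.apache.hadoop.conf.ConfigRedactor;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;

public class CustomSensitiveKeysSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Keep the shipped defaults and append a site-specific pattern (hypothetical key).
    conf.set(HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS,
        HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS_DEFAULT + ",my\\.app\\..*secret");

    ConfigRedactor redactor = new ConfigRedactor(conf);
    System.out.println(redactor.redact("my.app.db.secret", "secret-value"));  // <redacted>
    System.out.println(redactor.redact("fs.s3a.secret.key", "abc"));          // still <redacted>
  }
}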