HADOOP-16958. NPE when hadoop.security.authorization is enabled but the input PolicyProvider for ZKFCRpcServer is NULL. Contributed by Ctest.

Ayush Saxena 2020-04-14 02:13:42 +05:30
parent 11d17417ce
commit 3edbe8708a
2 changed files with 54 additions and 0 deletions
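Background for the change below: ZKFCRpcServer hands the supplied PolicyProvider to its RPC server's service-ACL refresh whenever hadoop.security.authorization is true, so a null provider previously surfaced as a NullPointerException inside that refresh rather than at the point of misconfiguration. The patch makes this fail fast with a descriptive HadoopIllegalArgumentException. The following is a minimal sketch of that caller-visible contract, not part of the patch: the class and demo method names are illustrative, the ZKFailoverController instance and provider are assumed to be supplied by the caller, and the sketch sits in the org.apache.hadoop.ha package like the test further down.

// Sketch only: illustrates the behavior introduced by this patch.
package org.apache.hadoop.ha;

import java.net.InetSocketAddress;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.authorize.PolicyProvider;

class ZKFCRpcServerPolicySketch {

  // "zkfc" and "provider" are assumed placeholders supplied by the caller.
  static void demo(Configuration conf, ZKFailoverController zkfc,
      PolicyProvider provider) throws Exception {
    conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);

    // Authorization enabled + null provider: rejected at construction time
    // with a clear message, instead of an NPE later during the ACL refresh.
    try {
      new ZKFCRpcServer(conf, new InetSocketAddress(0), zkfc, null);
    } catch (HadoopIllegalArgumentException expected) {
      // "hadoop.security.authorization is configured to true but
      // service-level authorization security policy is null."
    }

    // Authorization enabled + non-null provider: unchanged behavior, the
    // server refreshes its service ACLs from the provider and can be started.
    ZKFCRpcServer server =
        new ZKFCRpcServer(conf, new InetSocketAddress(0), zkfc, provider);
    server.start();
    server.stopAndJoin();
  }
}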

ZKFCRpcServer.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.ha;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -63,6 +64,12 @@ public class ZKFCRpcServer implements ZKFCProtocol {
    // set service-level authorization security policy
    if (conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
      if (policy == null) {
        throw new HadoopIllegalArgumentException(
            CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
                + " is configured to true but service-level"
                + " authorization security policy is null.");
      }
      server.refreshServiceAcl(conf, policy);
    }

TestZKFailoverController.java

@@ -19,15 +19,22 @@ package org.apache.hadoop.ha;
import static org.junit.Assert.*;
import java.net.InetSocketAddress;
import java.security.NoSuchAlgorithmException;
import com.google.common.base.Supplier;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
import org.apache.hadoop.ha.HealthMonitor.State;
import org.apache.hadoop.ha.MiniZKFCCluster.DummyZKFC;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
@@ -128,6 +135,46 @@ public class TestZKFailoverController extends ClientBaseWithFixes {
        runFC(svc));
  }

  @Test
  public void testPolicyProviderForZKFCRpcServer() throws Exception {
    Configuration myconf = new Configuration();
    myconf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
        true);

    DummyHAService dummyHAService = new DummyHAService(HAServiceState.ACTIVE,
        new InetSocketAddress(0), false);
    MiniZKFCCluster.DummyZKFC dummyZKFC =
        new MiniZKFCCluster.DummyZKFC(myconf, dummyHAService);

    // initialize ZKFCRpcServer with null policy
    LambdaTestUtils.intercept(HadoopIllegalArgumentException.class,
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
            + " is configured to true but service-level"
            + " authorization security policy is null.",
        () -> new ZKFCRpcServer(myconf, new InetSocketAddress(0),
            dummyZKFC, null));

    // initialize ZKFCRpcServer with dummy policy
    PolicyProvider dummyPolicy = new PolicyProvider() {
      private final Service[] services = new Service[] {
          new Service(CommonConfigurationKeys.SECURITY_ZKFC_PROTOCOL_ACL,
              ZKFCProtocol.class),
          new Service(
              CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_POLICY,
              RefreshAuthorizationPolicyProtocol.class),
      };

      @Override
      public Service[] getServices() {
        return this.services;
      }
    };

    ZKFCRpcServer server = new ZKFCRpcServer(myconf,
        new InetSocketAddress(0), dummyZKFC, dummyPolicy);
    server.start();
    server.stopAndJoin();
  }

  @Test
  public void testFormatOneClusterLeavesOtherClustersAlone() throws Exception {
    DummyHAService svc = cluster.getService(1);