HDFS-14129. Addendum to HDFS-14129. Contributed by Ranith Sardar.

Authored by Surendra Singh Lilhore, 2019-01-16 11:42:17 +05:30; committed by Brahma Reddy Battula
parent f40e10b349
commit 7b61cbf672
3 changed files with 194 additions and 0 deletions

RouterAdminProtocol.java

@@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.server.federation.resolver.MountTableManager;
import org.apache.hadoop.hdfs.server.federation.router.NameserviceManager;
import org.apache.hadoop.hdfs.server.federation.router.RouterStateManager;
import org.apache.hadoop.ipc.GenericRefreshProtocol;

/**
 * Protocol used by routeradmin to communicate with statestore.
 */
@InterfaceAudience.Private
@InterfaceStability.Stable
public interface RouterAdminProtocol extends MountTableManager,
    RouterStateManager, NameserviceManager, GenericRefreshProtocol {
}
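
Not part of the commit: a minimal sketch of how an administrator-supplied ACL could be attached to this aggregated protocol. Service-level authorization keys each ACL to a protocol class, so bundling MountTableManager, RouterStateManager, NameserviceManager and GenericRefreshProtocol behind one interface lets a single ACL entry govern every router admin operation. The class name RouterAdminAclDemo and the principals "rbfadmin" and "hdfsadmins" are invented; the configuration key is the constant that RouterPolicyProvider (next file) binds to this interface, and resolving it assumes a Hadoop build that includes HDFS-14129.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;

public class RouterAdminAclDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Hadoop service ACL format: "user1,user2 group1,group2"; "*" allows all.
    // "rbfadmin" and "hdfsadmins" are placeholder principals.
    conf.set(CommonConfigurationKeys.SECURITY_ROUTER_ADMIN_PROTOCOL_ACL,
        "rbfadmin hdfsadmins");
    System.out.println("Router admin ACL: "
        + conf.get(CommonConfigurationKeys.SECURITY_ROUTER_ADMIN_PROTOCOL_ACL));
  }
}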

RouterPolicyProvider.java

@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocolPB;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.security.authorize.Service;

/**
 * {@link HDFSPolicyProvider} for RBF protocols.
 */
@InterfaceAudience.Private
public class RouterPolicyProvider extends HDFSPolicyProvider {

  private static final Service[] RBF_SERVICES = new Service[] {
      new Service(CommonConfigurationKeys.SECURITY_ROUTER_ADMIN_PROTOCOL_ACL,
          RouterAdminProtocol.class) };

  private final Service[] services;

  public RouterPolicyProvider() {
    List<Service> list = new ArrayList<>();
    list.addAll(Arrays.asList(super.getServices()));
    list.addAll(Arrays.asList(RBF_SERVICES));
    services = list.toArray(new Service[list.size()]);
  }

  @Override
  public Service[] getServices() {
    return Arrays.copyOf(services, services.length);
  }
}
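
The provider above only declares the protocol-to-ACL mapping; it takes effect when an RPC server refreshes its service ACLs with it while hadoop.security.authorization is enabled. Below is a hedged sketch of that wiring, not taken from this commit: the helper class and method are invented, while Server#refreshServiceAcl is the standard Hadoop IPC hook that the NameNode and DataNode use for HDFSPolicyProvider.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hdfs.protocolPB.RouterPolicyProvider;
import org.apache.hadoop.ipc.Server;

/** Hypothetical helper; the router servers do the equivalent internally. */
public final class RouterServiceAclUtil {

  private RouterServiceAclUtil() {
  }

  /** Load RBF service ACLs into an RPC server when authorization is on. */
  public static void refreshAclIfEnabled(Configuration conf, Server server) {
    boolean authorize = conf.getBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false);
    if (authorize) {
      // Re-reads the policy configuration and applies the provider's
      // Service[] mapping, including the SECURITY_ROUTER_ADMIN_PROTOCOL_ACL
      // entry for RouterAdminProtocol.
      server.refreshServiceAcl(conf, new RouterPolicyProvider());
    }
  }
}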

TestRouterPolicyProvider.java

@@ -0,0 +1,108 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.federation.router;

import org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer;

import static org.junit.Assert.*;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.ClassUtils;
import org.apache.hadoop.hdfs.protocolPB.RouterPolicyProvider;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.security.authorize.Service;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Sets;

/**
 * Test suite covering RouterPolicyProvider. We expect that it contains a
 * security policy definition for every RPC protocol used in HDFS. The test
 * suite works by scanning an RPC server's class to find the protocol
 * interfaces it implements, and then comparing that to the protocol
 * interfaces covered in RouterPolicyProvider. This is a parameterized test
 * repeated for multiple HDFS RPC server classes.
 */
@RunWith(Parameterized.class)
public class TestRouterPolicyProvider {
  private static final Logger LOG = LoggerFactory.getLogger(
      TestRouterPolicyProvider.class);

  private static Set<Class<?>> policyProviderProtocols;

  @Rule
  public TestName testName = new TestName();

  private final Class<?> rpcServerClass;

  @BeforeClass
  public static void initialize() {
    Service[] services = new RouterPolicyProvider().getServices();
    policyProviderProtocols = new HashSet<>(services.length);
    for (Service service : services) {
      policyProviderProtocols.add(service.getProtocol());
    }
  }

  public TestRouterPolicyProvider(Class<?> rpcServerClass) {
    this.rpcServerClass = rpcServerClass;
  }

  @Parameters(name = "protocolsForServer-{0}")
  public static List<Class<?>[]> data() {
    return Arrays.asList(new Class<?>[][] {{RouterRpcServer.class},
        {NameNodeRpcServer.class}, {DataNode.class},
        {RouterAdminServer.class}});
  }

  @Test
  public void testPolicyProviderForServer() {
    List<?> ifaces = ClassUtils.getAllInterfaces(rpcServerClass);
    Set<Class<?>> serverProtocols = new HashSet<>(ifaces.size());
    for (Object obj : ifaces) {
      Class<?> iface = (Class<?>) obj;
      if (iface.getSimpleName().endsWith("Protocol")) {
        serverProtocols.add(iface);
      }
    }
    LOG.info("Running test {} for RPC server {}. Found server protocols {} "
        + "and policy provider protocols {}.", testName.getMethodName(),
        rpcServerClass.getName(), serverProtocols, policyProviderProtocols);
    assertFalse("Expected to find at least one protocol in server.",
        serverProtocols.isEmpty());
    final Set<Class<?>> differenceSet = Sets.difference(serverProtocols,
        policyProviderProtocols);
    assertTrue(String.format(
        "Following protocols for server %s are not defined in %s: %s",
        rpcServerClass.getName(), RouterPolicyProvider.class.getName(),
        Arrays.toString(differenceSet.toArray())), differenceSet.isEmpty());
  }
}
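
To make the reflection scan in testPolicyProviderForServer concrete, here is a small standalone toy, not part of the commit; FooProtocol, BarManager and DemoServer are invented names. It shows what ClassUtils.getAllInterfaces returns and why only interfaces whose simple name ends in "Protocol" are compared against the policy provider.

import java.util.List;

import org.apache.commons.lang3.ClassUtils;

public class InterfaceScanDemo {

  interface FooProtocol {
  }

  interface BarManager {
  }

  /** Stand-in for an RPC server class such as RouterAdminServer. */
  static class DemoServer implements FooProtocol, BarManager {
  }

  public static void main(String[] args) {
    // getAllInterfaces walks the class hierarchy and returns every
    // implemented interface, including ones inherited transitively.
    List<Class<?>> ifaces = ClassUtils.getAllInterfaces(DemoServer.class);
    for (Class<?> iface : ifaces) {
      boolean checked = iface.getSimpleName().endsWith("Protocol");
      // Only FooProtocol would need an entry in the policy provider;
      // BarManager is ignored by the test's name filter.
      System.out.println(iface.getSimpleName() + " -> checked=" + checked);
    }
  }
}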