SOLR-14053: Fix HDFS tests to be ignored if Native I/O isn't available (remove tests.disableHdfs)

Signed-off-by: Kevin Risden <krisden@apache.org>
This commit is contained in:
Kevin Risden 2019-12-11 21:38:25 -05:00
parent d559f47222
commit 861c77cb17
No known key found for this signature in database
GPG Key ID: 040FAE3292C5F73F
6 changed files with 67 additions and 73 deletions

View File

@ -24,7 +24,6 @@ import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
@ -32,8 +31,10 @@ import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.request.SolrQueryRequest;
@ -57,23 +58,21 @@ public class TestSolrCloudWithSecureImpersonation extends SolrTestCaseJ4 {
private static String getUsersFirstGroup() throws Exception {
String group = "*"; // accept any group if a group can't be found
if (!Constants.WINDOWS) { // does not work on Windows!
org.apache.hadoop.security.Groups hGroups =
new org.apache.hadoop.security.Groups(new Configuration());
try {
List<String> g = hGroups.getGroups(System.getProperty("user.name"));
if (g != null && g.size() > 0) {
group = g.get(0);
}
} catch (NullPointerException npe) {
// if user/group doesn't exist on test box
org.apache.hadoop.security.Groups hGroups =
new org.apache.hadoop.security.Groups(new Configuration());
try {
List<String> g = hGroups.getGroups(System.getProperty("user.name"));
if (g != null && g.size() > 0) {
group = g.get(0);
}
} catch (NullPointerException npe) {
// if user/group doesn't exist on test box
}
return group;
}
private static Map<String, String> getImpersonatorSettings() throws Exception {
Map<String, String> filterProps = new TreeMap<String, String>();
Map<String, String> filterProps = new TreeMap<>();
filterProps.put(KerberosPlugin.IMPERSONATOR_PREFIX + "noGroups.hosts", "*");
filterProps.put(KerberosPlugin.IMPERSONATOR_PREFIX + "anyHostAnyUser.groups", "*");
filterProps.put(KerberosPlugin.IMPERSONATOR_PREFIX + "anyHostAnyUser.hosts", "*");
@ -93,7 +92,7 @@ public class TestSolrCloudWithSecureImpersonation extends SolrTestCaseJ4 {
@BeforeClass
public static void startup() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
HdfsTestUtil.checkAssumptions();
System.setProperty("authenticationPlugin", HttpParamDelegationTokenPlugin.class.getName());
System.setProperty(KerberosPlugin.DELEGATION_TOKEN_ENABLED, "true");
@ -142,32 +141,35 @@ public class TestSolrCloudWithSecureImpersonation extends SolrTestCaseJ4 {
}
@Before
public void clearCalledIndicator() throws Exception {
public void clearCalledIndicator() {
ImpersonatorCollectionsHandler.called.set(false);
}
@AfterClass
public static void shutdown() throws Exception {
if (miniCluster != null) {
miniCluster.shutdown();
}
miniCluster = null;
if (solrClient != null) {
solrClient.close();
IOUtils.closeQuietly(solrClient);
solrClient = null;
}
solrClient = null;
System.clearProperty("authenticationPlugin");
System.clearProperty(KerberosPlugin.DELEGATION_TOKEN_ENABLED);
System.clearProperty("solr.kerberos.cookie.domain");
Map<String, String> impSettings = getImpersonatorSettings();
for (Map.Entry<String, String> entry : impSettings.entrySet()) {
System.clearProperty(entry.getKey());
}
System.clearProperty("solr.test.sys.prop1");
System.clearProperty("solr.test.sys.prop2");
System.clearProperty("collectionsHandler");
try {
if (miniCluster != null) {
miniCluster.shutdown();
}
} finally {
miniCluster = null;
System.clearProperty("authenticationPlugin");
System.clearProperty(KerberosPlugin.DELEGATION_TOKEN_ENABLED);
System.clearProperty("solr.kerberos.cookie.domain");
Map<String, String> impSettings = getImpersonatorSettings();
for (Map.Entry<String, String> entry : impSettings.entrySet()) {
System.clearProperty(entry.getKey());
}
System.clearProperty("solr.test.sys.prop1");
System.clearProperty("solr.test.sys.prop2");
System.clearProperty("collectionsHandler");
SolrRequestParsers.DEFAULT.setAddRequestHeadersToContext(false);
SolrRequestParsers.DEFAULT.setAddRequestHeadersToContext(false);
}
}
private void create1ShardCollection(String name, String config, MiniSolrCloudCluster solrCluster) throws Exception {
@ -290,8 +292,7 @@ public class TestSolrCloudWithSecureImpersonation extends SolrTestCaseJ4 {
public void testProxyInvalidRemote() throws Exception {
HttpSolrClient.RemoteSolrException e = expectThrows(HttpSolrClient.RemoteSolrException.class,
() -> {
String invalidIpAddress = DEAD_HOST_2;
solrClient.request(getProxyRequest("localHostAnyGroup","bar", "[ff01::114]", invalidIpAddress));
solrClient.request(getProxyRequest("localHostAnyGroup","bar", "[ff01::114]", DEAD_HOST_2));
});
assertTrue(e.getMessage().contains(getExpectedHostExMsg("localHostAnyGroup")));
}
@ -319,15 +320,12 @@ public class TestSolrCloudWithSecureImpersonation extends SolrTestCaseJ4 {
// try a command to each node, one of them must be forwarded
for (JettySolrRunner jetty : miniCluster.getJettySolrRunners()) {
HttpSolrClient client =
new HttpSolrClient.Builder(jetty.getBaseUrl().toString() + "/" + collectionName).build();
try {
try (HttpSolrClient client = new HttpSolrClient.Builder(
jetty.getBaseUrl().toString() + "/" + collectionName).build()) {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q", "*:*");
params.set(USER_PARAM, "user");
client.query(params);
} finally {
client.close();
}
}
}

View File

@ -39,13 +39,13 @@ import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.util.HdfsUtil;
import org.junit.AssumptionViolatedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -73,6 +73,20 @@ public class HdfsTestUtil {
return setupClass(dir, haTesting, true);
}
/**
 * Runs every environment assumption check required by HDFS-based tests.
 * Each check uses a test "assumption" (AssumptionViolatedException), so a
 * failing precondition skips the calling test rather than failing it.
 */
public static void checkAssumptions() {
// skip on Windows unless Hadoop NativeIO is available
checkHadoopWindows();
// commons-lang3 FastDateFormat must work with the configured locale
checkFastDateFormat();
// NOTE(review): semantics of this check are not visible here — presumably
// validates Hadoop's generated-ID behavior; confirm in HdfsTestUtil
checkGeneratedIdMatches();
}
/**
 * Skips the calling test (via {@link LuceneTestCase#assumeTrue}) when running
 * on Windows without Hadoop NativeIO support, because Hadoop integration tests
 * fail in that environment.
 */
private static void checkHadoopWindows() {
// passes on any non-Windows platform, or on Windows when NativeIO is available
LuceneTestCase.assumeTrue("Hadoop does not work on Windows without Hadoop NativeIO",
!Constants.WINDOWS || NativeIO.isAvailable());
}
/**
* Checks that commons-lang3 FastDateFormat works with configured locale
*/
@ -96,8 +110,7 @@ public class HdfsTestUtil {
}
public static MiniDFSCluster setupClass(String dir, boolean safeModeTesting, boolean haTesting) throws Exception {
checkFastDateFormat();
checkGeneratedIdMatches();
checkAssumptions();
if (!HA_TESTING_ENABLED) haTesting = false;
@ -132,10 +145,6 @@ public class HdfsTestUtil {
dfsClusterBuilder.nnTopology(MiniDFSNNTopology.simpleHATopology());
}
if (!NativeIO.isAvailable()) {
throw new AssumptionViolatedException("NativeIO not available for HDFS.");
}
MiniDFSCluster dfsCluster = dfsClusterBuilder.build();
HdfsUtil.TEST_CONF = getClientConfiguration(dfsCluster);
System.setProperty("solr.hdfs.home", getDataDir(dfsCluster, "solr_hdfs_home"));

View File

@ -24,7 +24,6 @@ import java.util.Set;
import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
import org.apache.hadoop.util.Time;
import org.apache.http.HttpStatus;
import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@ -35,10 +34,12 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.DelegationTokenRequest;
import org.apache.solr.client.solrj.response.DelegationTokenResponse;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.IOUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@ -51,7 +52,7 @@ public class TestDelegationWithHadoopAuth extends SolrCloudTestCase {
@BeforeClass
public static void setupClass() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
HdfsTestUtil.checkAssumptions();
configureCluster(NUM_SERVERS)// nodes
.withSecurityJson(TEST_PATH().resolve("security").resolve("hadoop_simple_auth_with_delegation.json"))
@ -70,12 +71,12 @@ public class TestDelegationWithHadoopAuth extends SolrCloudTestCase {
@AfterClass
public static void tearDownClass() throws Exception {
if (primarySolrClient != null) {
primarySolrClient.close();
IOUtils.closeQuietly(primarySolrClient);
primarySolrClient = null;
}
if (secondarySolrClient != null) {
secondarySolrClient.close();
IOUtils.closeQuietly(secondarySolrClient);
secondarySolrClient = null;
}
}

View File

@ -23,12 +23,12 @@ import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.Utils;
import org.apache.solr.security.HadoopAuthPlugin;
@ -50,7 +50,7 @@ public class TestImpersonationWithHadoopAuth extends SolrCloudTestCase {
@SuppressWarnings("unchecked")
@BeforeClass
public static void setupClass() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
HdfsTestUtil.checkAssumptions();
InetAddress loopback = InetAddress.getLoopbackAddress();
Path securityJsonPath = TEST_PATH().resolve("security").resolve("hadoop_simple_auth_with_delegation.json");

View File

@ -20,7 +20,6 @@ import java.io.File;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@ -28,6 +27,7 @@ import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.KerberosTestServices;
import org.apache.solr.cloud.SolrCloudAuthTestCase;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.SolrInputDocument;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -41,7 +41,7 @@ public class TestSolrCloudWithHadoopAuthPlugin extends SolrCloudAuthTestCase {
@BeforeClass
public static void setupClass() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
HdfsTestUtil.checkAssumptions();
setupMiniKdc();

View File

@ -27,16 +27,14 @@ import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.Constants;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider;
import org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
@ -58,7 +56,7 @@ public class TestZkAclsWithHadoopAuth extends SolrCloudTestCase {
@BeforeClass
public static void setupClass() throws Exception {
assumeFalse("Hadoop does not work on Windows", Constants.WINDOWS);
HdfsTestUtil.checkAssumptions();
System.setProperty(SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME,
VMParamsAllAndReadonlyDigestZkACLProvider.class.getName());
@ -88,16 +86,9 @@ public class TestZkAclsWithHadoopAuth extends SolrCloudTestCase {
@Test
public void testZkAcls() throws Exception {
ZooKeeper keeper = null;
try {
keeper = new ZooKeeper(cluster.getZkServer().getZkAddress(), (int) TimeUnit.MINUTES.toMillis(1), new Watcher() {
@Override
public void process(WatchedEvent arg0) {
// Do nothing
}
});
keeper.addAuthInfo("digest", ("solr:"+SOLR_PASSWD).getBytes(StandardCharsets.UTF_8));
try (ZooKeeper keeper = new ZooKeeper(cluster.getZkServer().getZkAddress(),
(int) TimeUnit.MINUTES.toMillis(1), arg0 -> {/* Do nothing */})) {
keeper.addAuthInfo("digest", ("solr:" + SOLR_PASSWD).getBytes(StandardCharsets.UTF_8));
// Test well known paths.
checkNonSecurityACLs(keeper, "/solr.xml");
@ -106,13 +97,8 @@ public class TestZkAclsWithHadoopAuth extends SolrCloudTestCase {
// Now test all ZK tree.
String zkHost = cluster.getSolrClient().getZkHost();
String zkChroot = zkHost.contains("/")? zkHost.substring(zkHost.indexOf("/")): null;
String zkChroot = zkHost.contains("/") ? zkHost.substring(zkHost.indexOf("/")) : null;
walkZkTree(keeper, zkChroot, "/");
} finally {
if (keeper != null) {
keeper.close();
}
}
}