Merge remote-tracking branch 'origin/master' into gradle-master

Dawid Weiss 2019-12-13 10:51:14 +01:00
commit 035cede6b8
14 changed files with 105 additions and 12 deletions

View File

@@ -1094,7 +1094,6 @@
   <!-- Restrict access to certain Java features and install security manager: -->
   <sysproperty key="common.dir" file="${common.dir}" />
-  <sysproperty key="ant.library.dir" file="${ant.library.dir}" />
   <sysproperty key="clover.db.dir" file="${clover.db.dir}" />
   <syspropertyset>
     <propertyref prefix="java.security.manager"/>

View File

@@ -34,15 +34,8 @@ grant {
   permission java.io.FilePermission "${clover.db.dir}${/}-", "read,write,delete";
   permission java.io.FilePermission "${tests.linedocsfile}", "read";
   // hadoop
-  permission java.io.FilePermission "${ant.library.dir}${/}-", "read";
-  permission java.io.FilePermission "${user.home}${/}.ant${/}lib${/}-", "read";
   permission java.io.FilePermission "${user.home}${/}hadoop-metrics2.properties", "read";
   permission java.io.FilePermission "${user.home}${/}hadoop-metrics2-namenode.properties", "read";
-  // kerberos
-  permission java.io.FilePermission "${user.home}${/}.java.login.config", "read";
-  // SolrTestCaseJ4 explicitly uses these
-  permission java.io.FilePermission "/dev/./urandom", "read";
-  permission java.io.FilePermission "/dev/random", "read";
   // DirectoryFactoryTest messes with these (wtf?)
   permission java.io.FilePermission "/tmp/inst1/conf/solrcore.properties", "read";
   permission java.io.FilePermission "/path/to/myinst/conf/solrcore.properties", "read";

View File

@@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FileUtil {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
   private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);

View File

@@ -38,6 +38,7 @@ import static java.nio.file.Files.createLink;
  * efficient - and minimizes the impact of the extra buffer creations.
  */
 public class HardLink {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
   public final LinkStats linkStats; //not static

View File

@@ -62,6 +62,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class RawLocalFileSystem extends FileSystem {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   static final URI NAME = URI.create("file:///");
   private Path workingDir;
   // Temporary workaround for HADOOP-9652.

View File

@@ -78,7 +78,9 @@ import com.google.common.annotations.VisibleForTesting;
  *
  * This class is synchronized by {@link FsVolumeImpl}.
  */
-class BlockPoolSlice {
+public class BlockPoolSlice {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
   private final String bpid;

View File

@@ -32,6 +32,8 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Private
 public class NameNodeResourceChecker {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   /**
    * Create a NameNodeResourceChecker, which will check the edits dirs and any
    * additional dirs to check set in <code>conf</code>.

View File

@@ -116,6 +116,8 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public final class HttpServer2 implements FilterContainer {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
   public static final String HTTP_SCHEME = "http";

View File

@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The classes under this package were copied from Apache Hadoop and modified
+ * to avoid certain issues during tests. The copied classes override the
+ * Apache Hadoop dependency versions during tests.
+ *
+ * The HttpServer2 class was copied to avoid Jetty 9.4 dependency issues. Since
+ * Solr uses Jetty 9.4, Hadoop integration tests need to use Jetty 9.4 as
+ * well. The HttpServer2 class should be removed when Hadoop is upgraded to
+ * 3.3.0, due to HADOOP-16152 upgrading Hadoop to Jetty 9.4.
+ *
+ * The classes BlockPoolSlice (HDFS-14251), DiskChecker, FileUtil, HardLink,
+ * NameNodeResourceChecker, and RawLocalFileSystem were copied to avoid
+ * issues with running Hadoop integration tests under the Java security
+ * manager. Many of these classes use org.apache.hadoop.util.Shell,
+ * which shells out to try to do common filesystem checks.
+ *
+ * Over time these classes should be removed as upstream fixes to Apache
+ * Hadoop are made. When the Apache Hadoop dependency is upgraded in
+ * Solr, the copied classes should be compared against that version.
+ */
+package org.apache.hadoop;
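The verification mechanism behind these copies deserves a concrete illustration. A minimal, self-contained sketch (hypothetical class names, not part of this commit): each copied class exposes a public marker field, and a reflective lookup proves that the copy, rather than the class from the upstream Hadoop jar, won on the test classpath.

import java.lang.reflect.Field;

public class OverrideCheckSketch {
  // Hypothetical stand-in for one of the copied classes; the real copies
  // keep their original org.apache.hadoop.* package names so they shadow
  // the identically-named classes in the Hadoop jars.
  public static class CopiedClass {
    public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
  }

  public static void main(String[] args) throws Exception {
    // Against an unmodified upstream class this lookup would throw
    // NoSuchFieldException, failing the check before any test runs.
    Field marker = CopiedClass.class.getField("SOLR_HACK_FOR_CLASS_VERIFICATION");
    System.out.println("Solr-modified copy loaded: " + (marker.get(null) != null));
  }
}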

View File

@@ -43,6 +43,8 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskChecker {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   public static final Logger LOG = LoggerFactory.getLogger(DiskChecker.class);
   public static class DiskErrorException extends IOException {

View File

@@ -51,6 +51,7 @@ import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.util.LogLevel;
 import org.apache.zookeeper.data.Stat;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -93,6 +94,11 @@ public class SearchRateTriggerIntegrationTest extends SolrCloudTestCase {
   }
+
+  @AfterClass
+  public static void cleanUpAfterClass() throws Exception {
+    cloudManager = null;
+  }
   @Before
   public void beforeTest() throws Exception {
     cluster.deleteAllCollections();

View File

@@ -19,8 +19,10 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.net.URI;
+import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Timer;
@@ -33,12 +35,19 @@ import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
 import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.util.DiskChecker;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -54,6 +63,8 @@ import static org.apache.lucene.util.LuceneTestCase.random;
 public class HdfsTestUtil {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private static final String SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD = "SOLR_HACK_FOR_CLASS_VERIFICATION";
   private static final String LOGICAL_HOSTNAME = "ha-nn-uri-%d";
   private static final boolean HA_TESTING_ENABLED = false; // SOLR-XXX
@@ -76,6 +87,7 @@ public class HdfsTestUtil {
   public static void checkAssumptions() {
     ensureHadoopHomeNotSet();
     checkHadoopWindows();
+    checkOverriddenHadoopClasses();
     checkFastDateFormat();
     checkGeneratedIdMatches();
   }
@@ -103,6 +115,23 @@ public class HdfsTestUtil {
         !Constants.WINDOWS || NativeIO.isAvailable());
   }
+
+  /**
+   * Ensure that the tests are picking up the modified Hadoop classes
+   */
+  private static void checkOverriddenHadoopClasses() {
+    List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class,
+        FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class);
+    for (Class<?> clazz : modifiedHadoopClasses) {
+      try {
+        LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",
+            clazz.getField(SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD));
+      } catch (NoSuchFieldException e) {
+        LuceneTestCase.fail("Expected to load Solr modified Hadoop class " + clazz.getCanonicalName() +
+            " , but it was not found.");
+      }
+    }
+  }

   /**
    * Checks that commons-lang3 FastDateFormat works with configured locale
    */
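For context, a sketch of how a test class might reach these checks (hedged: the dfsCluster field and the setupClass/teardownClass pairing follow the usual Solr HDFS test idiom, but the exact wiring below is an assumption, not a quote from this commit):

// Hypothetical HDFS test setup; HdfsTestUtil.setupClass(...) performs the
// assumption checks, now including checkOverriddenHadoopClasses(), before
// starting a MiniDFSCluster.
@BeforeClass
public static void setupClass() throws Exception {
  dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
}

@AfterClass
public static void teardownClass() throws Exception {
  HdfsTestUtil.teardownClass(dfsCluster);
  dfsCluster = null;
}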

View File

@@ -152,6 +152,9 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
     this.streamDeletes = builder.streamDeletes;
     this.basePath = builder.baseSolrUrl;
     this.stallTime = Integer.getInteger("solr.cloud.client.stallTime", 15000);
+    if (stallTime < pollQueueTime * 2) {
+      throw new RuntimeException("Invalid stallTime: " + stallTime + "ms, must be 2x > pollQueueTime " + pollQueueTime);
+    }
     if (builder.executorService != null) {
       this.scheduler = builder.executorService;
@@ -214,7 +217,6 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
         try {
           Update update;
           notifyQueueAndRunnersIfEmptyQueue();
-          //log.info("-- polling 1");
           update = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS);
           if (update == null) {
@@ -662,7 +664,12 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
    */
   public void setPollQueueTime(int pollQueueTime) {
     this.pollQueueTime = pollQueueTime;
-    this.stallTime = this.pollQueueTime * 3 / 2;
+    // make sure the stall time is larger than the polling time
+    // to give a chance for the queue to change
+    int minimalStallTime = pollQueueTime * 2;
+    if (minimalStallTime > this.stallTime) {
+      this.stallTime = minimalStallTime;
+    }
   }

   /**

View File

@@ -134,7 +134,9 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
     this.connectionTimeout = builder.connectionTimeoutMillis;
     this.soTimeout = builder.socketTimeoutMillis;
     this.stallTime = Integer.getInteger("solr.cloud.client.stallTime", 15000);
+    if (stallTime < pollQueueTime * 2) {
+      throw new RuntimeException("Invalid stallTime: " + stallTime + "ms, must be 2x > pollQueueTime " + pollQueueTime);
+    }
     if (builder.executorService != null) {
       this.scheduler = builder.executorService;
@@ -827,6 +829,12 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
    */
   public void setPollQueueTime(int pollQueueTime) {
     this.pollQueueTime = pollQueueTime;
+    // make sure the stall time is larger than the polling time
+    // to give a chance for the queue to change
+    int minimalStallTime = pollQueueTime * 2;
+    if (minimalStallTime > this.stallTime) {
+      this.stallTime = minimalStallTime;
+    }
   }

   public void setRequestWriter(RequestWriter requestWriter) {
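Both clients now enforce the same invariant: stallTime must be at least twice pollQueueTime, so a stalled queue always survives a couple of polling cycles before the client gives up. A standalone sketch of the arithmetic (illustrative plain Java, not Solr API):

// Mirrors the guard added to both ConcurrentUpdate*SolrClient constructors
// and to setPollQueueTime(); no Solr dependencies.
public class StallTimeInvariantDemo {
  static int stallTime = Integer.getInteger("solr.cloud.client.stallTime", 15000);

  static void setPollQueueTime(int pollQueueTime) {
    // The setter never lowers stallTime; it only raises it so that
    // stallTime >= 2 * pollQueueTime always holds.
    int minimalStallTime = pollQueueTime * 2;
    if (minimalStallTime > stallTime) {
      stallTime = minimalStallTime;
    }
  }

  public static void main(String[] args) {
    setPollQueueTime(5000);        // 2 * 5000 = 10000 <= 15000, unchanged
    System.out.println(stallTime); // 15000
    setPollQueueTime(10000);       // 2 * 10000 = 20000 > 15000, raised
    System.out.println(stallTime); // 20000
  }
}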