SOLR-7289: Tests should not ignore all leaking threads and instead just ignore the known leaking threads.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1668771 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2015-03-24 02:16:13 +00:00
parent 5c735c2728
commit 84adc8604d
24 changed files with 330 additions and 150 deletions
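The commit message above describes the intent: instead of disabling thread-leak checking wholesale with @ThreadLeakScope(Scope.NONE), each affected test now declares @ThreadLeakFilters naming a filter that excuses only the known offending threads, while defaultFilters = true keeps the runner's built-in filters active as well. A rough sketch of the pattern every changed test class follows (the class name here is illustrative, not from the patch):

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.util.BadHdfsThreadsFilter;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

// Before: @ThreadLeakScope(Scope.NONE) silently ignored every leaked thread.
// After: leak checking stays on; only threads matched by the filter are excused.
@ThreadLeakFilters(defaultFilters = true, filters = {
    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
})
public class SomeHdfsTest extends SolrTestCaseJ4 {
  // test body unchanged
}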

View File

@@ -16,13 +16,15 @@
  */
 package org.apache.solr.hadoop;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -42,24 +44,17 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.AbstractZkTestCase;
 import org.apache.solr.hadoop.hack.MiniMRCluster;
 import org.apache.solr.morphlines.solr.AbstractSolrMorphlineTestBase;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.lang.reflect.Array;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakAction({Action.WARN})
-@ThreadLeakLingering(linger = 0)
-@ThreadLeakZombies(Consequence.CONTINUE)
-@ThreadLeakScope(Scope.NONE)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @Slow
 public class MorphlineBasicMiniMRTest extends SolrTestCaseJ4 {

View File

@@ -16,13 +16,24 @@
  */
 package org.apache.solr.hadoop;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.net.URI;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -61,32 +72,24 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.hadoop.hack.MiniMRClientCluster;
 import org.apache.solr.hadoop.hack.MiniMRClientClusterFactory;
 import org.apache.solr.morphlines.solr.AbstractSolrMorphlineTestBase;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.UnsupportedEncodingException;
-import java.io.Writer;
-import java.lang.reflect.Array;
-import java.net.URI;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;

 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
 @ThreadLeakZombies(Consequence.CONTINUE)
-@ThreadLeakScope(Scope.NONE)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @SuppressSSL // SSL does not work with this test for currently unknown reasons
 @Slow
 public class MorphlineGoLiveMiniMRTest extends AbstractFullDistribZkTestBase {

View File

@@ -16,13 +16,10 @@
  */
 package org.apache.solr.morphlines.solr;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -32,20 +29,17 @@ import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.junit.BeforeClass;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.Test;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.Fields;
 import org.kitesdk.morphline.base.Notifications;
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakAction({Action.WARN})
-@ThreadLeakLingering(linger = 0)
-@ThreadLeakZombies(Consequence.CONTINUE)
-@ThreadLeakScope(Scope.NONE)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @Slow
 public class SolrMorphlineZkAliasTest extends AbstractSolrMorphlineZkTestBase {

View File

@@ -16,16 +16,13 @@
  */
 package org.apache.solr.morphlines.solr;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.io.Files;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.file.FileReader;
@@ -35,23 +32,25 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
-import org.junit.BeforeClass;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.Test;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.Fields;
 import org.kitesdk.morphline.base.Notifications;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.List;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+import com.google.common.io.Files;

-@ThreadLeakAction({Action.WARN})
-@ThreadLeakLingering(linger = 0)
-@ThreadLeakZombies(Consequence.CONTINUE)
-@ThreadLeakScope(Scope.NONE)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @Slow
 public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {

View File

@@ -16,29 +16,29 @@
  */
 package org.apache.solr.morphlines.solr;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.util.Iterator;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.Test;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.Fields;
 import org.kitesdk.morphline.base.Notifications;
-import java.io.File;
-import java.util.Iterator;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;

-@ThreadLeakAction({Action.WARN})
-@ThreadLeakLingering(linger = 0)
-@ThreadLeakZombies(Consequence.CONTINUE)
-@ThreadLeakScope(Scope.NONE)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @Slow
 public class SolrMorphlineZkTest extends AbstractSolrMorphlineZkTestBase {

View File

@@ -1,32 +1,31 @@
 package org.apache.solr.cloud;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Locale;
 import javax.security.auth.login.Configuration;
-import org.apache.lucene.util.Constants;
 import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.lucene.util.Constants;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.cloud.DefaultZkACLProvider;
 import org.apache.solr.common.cloud.SaslZkACLProvider;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkACLProvider;
-import org.apache.solr.common.cloud.DefaultZkACLProvider;
+import org.apache.solr.util.BadZookeeperThreadsFilter;
 import org.apache.zookeeper.CreateMode;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -44,7 +43,9 @@ import org.slf4j.LoggerFactory;
  * limitations under the License.
  */
-@ThreadLeakScope(Scope.NONE) // zookeeper sasl login can leak threads, see ZOOKEEPER-2100
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadZookeeperThreadsFilter.class // zookeeper sasl login can leak threads, see ZOOKEEPER-2100
+})
 public class SaslZkACLProviderTest extends SolrTestCaseJ4 {
   protected static Logger log = LoggerFactory

View File

@@ -28,8 +28,8 @@ import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Nightly;
 import org.apache.lucene.util.LuceneTestCase.Slow;
@@ -46,8 +46,8 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.util.DefaultSolrThreadFactory;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -56,7 +56,9 @@ import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
 @Nightly
 @Slow
 @SuppressSSL
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class SharedFSAutoReplicaFailoverTest extends AbstractFullDistribZkTestBase {
   private static final boolean DEBUG = true;

View File

@@ -22,16 +22,18 @@ import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.BasicDistributedZk2Test;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsBasicDistributedZk2Test extends BasicDistributedZk2Test {
   private static MiniDFSCluster dfsCluster;

View File

@@ -17,23 +17,24 @@ package org.apache.solr.cloud.hdfs;
  * limitations under the License.
  */
-import java.io.File;
 import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.BasicDistributedZkTest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsBasicDistributedZkTest extends BasicDistributedZkTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -22,16 +22,18 @@ import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.ChaosMonkeySafeLeaderTest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsChaosMonkeySafeLeaderTest extends ChaosMonkeySafeLeaderTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -23,16 +23,18 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.CollectionsAPIDistributedZkTest;
 import org.apache.solr.update.HdfsUpdateLog;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsCollectionsAPIDistributedZkTest extends CollectionsAPIDistributedZkTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -22,16 +22,18 @@ import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.RecoveryZkTest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsRecoveryZkTest extends RecoveryZkTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -22,16 +22,18 @@ import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.SyncSliceTest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsSyncSliceTest extends SyncSliceTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -0,0 +1,87 @@
package org.apache.solr.cloud.hdfs;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.util.BadHdfsThreadsFilter;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
@ThreadLeakFilters(defaultFilters = true, filters = {
BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
})
public class HdfsThreadLeakTest extends SolrTestCaseJ4 {
  private static MiniDFSCluster dfsCluster;

  @BeforeClass
  public static void beforeClass() throws Exception {
    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath(), false);
  }

  @AfterClass
  public static void afterClass() throws Exception {
    HdfsTestUtil.teardownClass(dfsCluster);
    dfsCluster = null;
  }

  @Before
  public void setUp() throws Exception {
    super.setUp();
  }

  @After
  public void tearDown() throws Exception {
    super.tearDown();
  }

  @Test
  public void testBasic() throws IOException {
    // A simple write/lease-recovery round trip; the HDFS client leaves helper
    // threads (e.g. IPC, LeaseRenewer) behind, which BadHdfsThreadsFilter excuses.
    URI uri = dfsCluster.getURI();
    Path path = new Path(uri.toString());
    Configuration conf = new Configuration();
    // bypass the FileSystem cache so this test gets its own client instance
    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
    FileSystem fs = FileSystem.get(path.toUri(), conf);
    Path testFile = new Path(uri.toString() + "/testfile");
    FSDataOutputStream out = fs.create(testFile);
    out.write(5);
    out.hflush();
    out.close();
    ((DistributedFileSystem) fs).recoverLease(testFile);
    fs.close();
  }
}

View File

@@ -22,16 +22,18 @@ import java.io.IOException;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.UnloadDistributedZkTest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsUnloadDistributedZkTest extends UnloadDistributedZkTest {
   private static MiniDFSCluster dfsCluster;

View File

@@ -17,8 +17,8 @@
 package org.apache.solr.cloud.hdfs;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.NRTCachingDirectory;
@@ -38,6 +38,7 @@ import org.apache.solr.store.blockcache.BlockCache;
 import org.apache.solr.store.blockcache.BlockDirectory;
 import org.apache.solr.store.blockcache.BlockDirectoryCache;
 import org.apache.solr.store.blockcache.Cache;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.apache.solr.util.RefCounted;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -50,7 +51,9 @@ import java.util.List;
 @Slow
 @Nightly
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsWriteToMultipleCollectionsTest extends BasicDistributedZkTest {
   private static final String SOLR_HDFS_HOME = "solr.hdfs.home";
   private static final String SOLR_HDFS_BLOCKCACHE_GLOBAL = "solr.hdfs.blockcache.global";

View File

@@ -17,8 +17,8 @@
 package org.apache.solr.cloud.hdfs;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -36,6 +36,7 @@ import org.apache.solr.cloud.ChaosMonkey;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -50,7 +51,9 @@ import java.util.Timer;
 import java.util.TimerTask;

 @Slow
-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class StressHdfsTest extends BasicDistributedZkTest {
   private static final String DELETE_DATA_DIR_COLLECTION = "delete_data_dir";

View File

@@ -26,15 +26,17 @@ import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.DirectoryFactory.DirContext;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.apache.solr.util.MockCoreContainer.MockCoreDescriptor;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
   private static MiniDFSCluster dfsCluster;

View File

@@ -49,16 +49,18 @@ import org.apache.solr.update.HdfsUpdateLog;
 import org.apache.solr.update.UpdateHandler;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.noggit.ObjectBuilder;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakScope(Scope.NONE) // hdfs mini cluster currently leaks threads
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 // TODO: longer term this should be combined with TestRecovery somehow ??
 public class TestRecoveryHdfs extends SolrTestCaseJ4 {

View File

@@ -33,17 +33,18 @@ import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread (HADOOP-9049)
-//@Ignore("this test violates the test security policy because of org.apache.hadoop.fs.RawLocalFileSystem.mkdirs")
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsDirectoryTest extends SolrTestCaseJ4 {
   private static final int MAX_NUMBER_OF_WRITES = 10000;

View File

@@ -26,16 +26,18 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.store.Lock;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread (HADOOP-9049)
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 public class HdfsLockFactoryTest extends SolrTestCaseJ4 {
   private static MiniDFSCluster dfsCluster;

View File

@@ -29,14 +29,16 @@ import org.apache.solr.SolrTestCaseJ4.SuppressObjectReleaseTracker;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

-@ThreadLeakScope(Scope.NONE) // hdfs mini cluster currently leaks threads
+@ThreadLeakFilters(defaultFilters = true, filters = {
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
+})
 @SuppressObjectReleaseTracker(bugUrl = "https://issues.apache.org/jira/browse/SOLR-7115")
 public class TestHdfsUpdateLog extends SolrTestCaseJ4 {

View File

@@ -0,0 +1,36 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.carrotsearch.randomizedtesting.ThreadFilter;
public class BadHdfsThreadsFilter implements ThreadFilter {

  @Override
  public boolean reject(Thread t) {
    String name = t.getName();
    if (name.startsWith("IPC Parameter Sending Thread ")) { // SOLR-5007
      return true;
    } else if (name.startsWith("org.apache.hadoop.hdfs.PeerCache")) { // SOLR-7288
      return true;
    } else if (name.startsWith("LeaseRenewer")) { // SOLR-7287
      return true;
    }
    return false;
  }
}
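For reference, a minimal sketch (not part of the commit) of how this filter classifies threads by name; in the randomizedtesting runner, reject(...) returning true means the thread is excused from the leak check rather than reported:

import org.apache.solr.util.BadHdfsThreadsFilter;

// Hypothetical demo class, purely illustrative.
public class BadHdfsThreadsFilterDemo {
  public static void main(String[] args) {
    BadHdfsThreadsFilter filter = new BadHdfsThreadsFilter();
    Thread hdfsThread = new Thread(() -> {}, "LeaseRenewer:solr@localhost:8020");
    Thread appThread = new Thread(() -> {}, "my-app-thread");
    System.out.println(filter.reject(hdfsThread)); // true: excused from the leak check
    System.out.println(filter.reject(appThread));  // false: still reported as a leak
  }
}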

View File

@@ -0,0 +1,35 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.carrotsearch.randomizedtesting.ThreadFilter;
public class BadZookeeperThreadsFilter implements ThreadFilter {

  @Override
  public boolean reject(Thread t) {
    String name = t.getName();
    StackTraceElement[] stack = t.getStackTrace();
    if (name.startsWith("Thread-") && stack.length > 1
        && stack[stack.length - 2].getClassName().equals("org.apache.zookeeper.Login$1")) {
      return true; // see ZOOKEEPER-2100
    }
    return false;
  }
}