From 37166ce4e9c4b9595f4c39be9b7b6d0947819f85 Mon Sep 17 00:00:00 2001
From: Chris Hostetter
Date: Fri, 5 Apr 2019 15:06:50 -0700
Subject: [PATCH] Use FSDirectory for TestDocTermOrdsUninvertLimit to reduce
 the amount of RAM the test needs.

This lowers the risk of OOM, w/o needing to sacrifice testing of how the
randomized merging/segments might impact the uninversion

---
 .../solr/uninverting/TestDocTermOrdsUninvertLimit.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrdsUninvertLimit.java b/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrdsUninvertLimit.java
index 27942d28923..81e8c7bf1e0 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrdsUninvertLimit.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrdsUninvertLimit.java
@@ -38,8 +38,6 @@ public class TestDocTermOrdsUninvertLimit extends SolrTestCase {
    * New limit is 2^31, which is not very realistic to unit-test. */
   @SuppressWarnings({"ConstantConditions", "PointlessBooleanExpression"})
   @Nightly
-  // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
-  // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018
   public void testTriggerUnInvertLimit() throws IOException {
     final boolean SHOULD_TRIGGER = false; // Set this to true to use the test with the old implementation
 
@@ -48,7 +46,9 @@ public class TestDocTermOrdsUninvertLimit extends SolrTestCase {
     final int DOCS = (1<<16)-1; // The number of documents within a single pass (simplified)
     final int TERMS = REF_LIMIT/DOCS; // Each document must have this many references aka terms hit limit
 
-    Directory dir = newDirectory();
+    // disk based Directory to reduce risk of OOM
+    Directory dir = newFSDirectory(createTempDir("TestDocTermOrdsUninvertLimit"));
+
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
         newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
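
Note: the patch relies on the LuceneTestCase helpers newFSDirectory/createTempDir shown in the
second hunk. As a minimal standalone sketch of the same idea outside the test framework, the
index can be backed by an on-disk FSDirectory under a temporary directory, so segment data
lives on disk rather than on the heap. The class name and field values below are illustrative
only and are not part of the patch.

import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class DiskBackedIndexSketch {
  public static void main(String[] args) throws Exception {
    // On-disk temp directory, analogous to createTempDir(...) in the test framework.
    Path tmp = Files.createTempDirectory("uninvert-limit-sketch");

    // FSDirectory keeps index files on disk, so the index itself does not
    // compete with the uninverted field data for JVM heap.
    try (Directory dir = FSDirectory.open(tmp);
         IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      doc.add(new StringField("field", "value", Field.Store.NO));
      w.addDocument(doc);
      w.commit();
    }
  }
}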