Use FSDirectory for TestDocTermOrdsUninvertLimit to reduce the amount of RAM the test needs.

This lowers the risk of OOM without sacrificing coverage of how the randomized merging/segments might impact the uninversion.
Chris Hostetter 2019-04-05 15:06:50 -07:00
parent 7602f3c78e
commit 37166ce4e9
1 changed file with 3 additions and 3 deletions


@@ -38,8 +38,6 @@ public class TestDocTermOrdsUninvertLimit extends SolrTestCase {
    * New limit is 2^31, which is not very realistic to unit-test. */
   @SuppressWarnings({"ConstantConditions", "PointlessBooleanExpression"})
   @Nightly
-  // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
-  // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018
   public void testTriggerUnInvertLimit() throws IOException {
     final boolean SHOULD_TRIGGER = false; // Set this to true to use the test with the old implementation
@@ -48,7 +46,9 @@ public class TestDocTermOrdsUninvertLimit extends SolrTestCase {
     final int DOCS = (1<<16)-1; // The number of documents within a single pass (simplified)
     final int TERMS = REF_LIMIT/DOCS; // Each document must have this many references aka terms hit limit
-    Directory dir = newDirectory();
+    // disk based Directory to reduce risk of OOM
+    Directory dir = newFSDirectory(createTempDir("TestDocTermOrdsUninvertLimit"));
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
         newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
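
For context, a minimal self-contained sketch of the same pattern: a disk-backed Directory from newFSDirectory(createTempDir(...)) combined with RandomIndexWriter keeps the index out of heap while still exercising randomized merge policies and segment geometry. This is not the actual test from this commit; the class name, field name, and document count are illustrative, and it assumes only the standard LuceneTestCase helpers (newFSDirectory, createTempDir, newIndexWriterConfig, newLogMergePolicy).

import java.io.IOException;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

// Illustrative sketch only, not part of this commit.
public class TestDiskBackedDirectorySketch extends LuceneTestCase {

  public void testDiskBackedDirectoryPattern() throws IOException {
    // newFSDirectory + createTempDir puts the index on disk instead of in RAM,
    // while RandomIndexWriter still randomizes merging and segment layout.
    Directory dir = newFSDirectory(createTempDir("TestDiskBackedDirectorySketch"));
    RandomIndexWriter w = new RandomIndexWriter(random(), dir,
        newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));

    Document doc = new Document();
    doc.add(new StringField("field", "value", Field.Store.NO));
    w.addDocument(doc);

    // Verify the document made it in, then release the writer and directory
    // so the temp files are cleaned up.
    IndexReader r = w.getReader();
    assertEquals(1, r.numDocs());
    r.close();
    w.close();
    dir.close();
  }
}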