LUCENE-3808: Switch LuceneTestCaseRunner to RandomizedRunner. Enforce Random sharing contracts and thread-leak checks.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1326351 13f79535-47bb-0310-9956-ffa450edef68
Dawid Weiss 2012-04-15 14:41:44 +00:00
parent aec9ccdfb9
commit cf85aab1a0
524 changed files with 5547 additions and 4107 deletions
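The substantive change repeats across every Java test file below: the shared static LuceneTestCase.random field becomes the per-context random() accessor, so each test draws from a runner-managed, reproducible Random instead of one Random shared by all tests and threads. A minimal sketch of the new contract under RandomizedRunner (the class and variable names here are illustrative, not part of the commit):

import java.util.Random;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.util.LuceneTestCase;

public class ExampleRandomContractTest extends LuceneTestCase {

  public void testPerContextRandom() {
    // Old style (pre-LUCENE-3808): new MockAnalyzer(random) read a static
    // field shared by every test and thread, so failures were hard to
    // reproduce from a seed.
    // New style: random() returns a Random bound to the current test
    // context; per the commit message, the runner enforces the sharing
    // contract and flags threads a test leaves running.
    Random r = random();
    Analyzer analyzer = new MockAnalyzer(r);
    assertNotNull(analyzer);
  }
}

This is also why several diffs below move analyzer construction out of field initializers and into setUp(): a field initializer runs outside the test's randomness context, where random() is not yet valid.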

.gitignore

@ -381,3 +381,5 @@
/modules/suggest/*.iml
/modules/suggest/pom.xml
/modules/suggest/dist
/modules/spatial/build/


@ -18,6 +18,12 @@
-->
<project name="lucene-solr" default="test" basedir=".">
<target name="test-help" description="Test runner help">
<subant target="test-help" inheritall="false" failonerror="true">
<fileset dir="lucene" includes="build.xml" />
</subant>
</target>
<target name="test" description="Test both Lucene and Solr" depends="validate">
<sequential>
<subant target="test" inheritall="false" failonerror="true">


@ -169,5 +169,6 @@
<classpathentry kind="lib" path="solr/contrib/velocity/lib/commons-beanutils-1.7.0.jar"/>
<classpathentry kind="lib" path="solr/contrib/velocity/lib/commons-collections-3.2.1.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lucene/test-framework/lib/randomizedtesting-runner-1.0.0.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>


@ -74,9 +74,6 @@
<pathelement path="${java.class.path}"/>
</path>
<!-- remove this -->
<target name="test-tag" depends="test-backwards" description="deprecated"/>
<target name="compile-backwards" depends="compile-core"
description="Runs tests of a previous Lucene version.">
<sequential>
@ -98,10 +95,22 @@
</sequential>
</target>
<target name="test-backwards" /><!--add here after 4.0: depends="compile-backwards, backwards-test-warning, junit-backwards-mkdir, junit-backwards-sequential, junit-backwards-parallel"-->
<target name="backwards-test-warning" depends="check-backwards-params" if="backwards.ignoring.params">
<echo>
Warning: Ignoring your multiplier and nightly settings for backwards tests.
These tests are for API compatibility only!
</echo>
</target>
<target name="junit-backwards-mkdir">
<!--
Add dependency after 4.0: depends="compile-backwards, backwards-test-warning"
and uncomment inside of this target.
-->
<target name="test-backwards">
<!--
<mkdir dir="${build.dir.backwards}/test"/>
<backwards-test-macro/>
-->
</target>
<target name="check-backwards-params">
@ -113,13 +122,6 @@
</condition>
</target>
<target name="backwards-test-warning" depends="check-backwards-params" if="backwards.ignoring.params">
<echo>
Warning: Ignoring your multiplier and nightly settings for backwards tests.
These tests are for API compatibility only!
</echo>
</target>
<macrodef name="backwards-test-macro">
<attribute name="threadNum" default="1"/>
<attribute name="threadTotal" default="1"/>
@ -128,30 +130,17 @@
Note: we disable multiplier/nightly because the purpose is to find API breaks
-->
<test-macro
dataDir="${backwards.dir}/core/src/test"
tempDir="${build.dir.backwards}/test"
junit.classpath="backwards.junit.classpath"
junit.output.dir="${junit.output.dir.backwards}"
dataDir="${backwards.dir}/core/src/test"
tempDir="${build.dir.backwards}/test"
junit.classpath="backwards.junit.classpath"
junit.output.dir="${junit.output.dir.backwards}"
tests.nightly="false"
tests.multiplier="1"
threadNum="@{threadNum}"
threadTotal="@{threadTotal}"/>
</sequential>
</sequential>
</macrodef>
<target name="junit-backwards-sequential" if="tests.sequential">
<backwards-test-macro/>
</target>
<target name="junit-backwards-parallel" unless="tests.sequential">
<parallel threadsPerProcessor="${tests.threadspercpu}">
<backwards-test-macro threadNum="1" threadTotal="4"/>
<backwards-test-macro threadNum="2" threadTotal="4"/>
<backwards-test-macro threadNum="3" threadTotal="4"/>
<backwards-test-macro threadNum="4" threadTotal="4"/>
</parallel>
</target>
<target name="compile-core" depends="compile-lucene-core"/>
<!--


@ -61,8 +61,12 @@
<property name="junit.jar" value="junit-4.10.jar"/>
<property name="junit-location.jar" value="${common.dir}/test-framework/lib/${junit.jar}"/>
<path id="junit-path">
<pathelement location="${junit-location.jar}"/>
<fileset dir="${common.dir}/test-framework/lib">
<include name="junit-*.jar" />
<include name="randomizedtesting-runner-*.jar" />
</fileset>
</path>
<path id="ant-path">
@ -70,15 +74,10 @@
</path>
<!-- default arguments to pass to JVM executing tests -->
<property name="testmethod" value=""/>
<property name="args" value=""/>
<property name="tests.threadspercpu" value="1" />
<condition property="tests.sequential">
<or>
<isset property="testcase"/>
<equals arg1="${tests.threadspercpu}" arg2="0"/>
</or>
</condition>
<property name="tests.seed" value="" />
<property name="tests.threadspercpu" value="auto" />
<property name="tests.multiplier" value="1" />
<property name="tests.codec" value="random" />
<property name="tests.postingsformat" value="random" />
@ -86,9 +85,6 @@
<property name="tests.timezone" value="random" />
<property name="tests.directory" value="random" />
<property name="tests.linedocsfile" value="europarl.lines.txt.gz" />
<property name="tests.iter" value="1" />
<property name="tests.iter.min" value="${tests.iter}" />
<property name="tests.seed" value="random" />
<property name="tests.loggingfile" value="/dev/null"/>
<property name="tests.nightly" value="false" />
<property name="tests.weekly" value="false" />
@ -171,13 +167,6 @@
<property name="junit.output.dir.backwards" location="${build.dir.backwards}/test"/>
<property name="junit.reports" location="${build.dir}/test/reports"/>
<property name="junit.reports.backwards" location="${build.dir.backwards}/test/reports"/>
<property name="junit.excludes" value="**/Abstract*"/>
<condition property="junit.details.formatter"
value="org.apache.tools.ant.taskdefs.optional.junit.BriefJUnitResultFormatter"
else="org.apache.lucene.util.LuceneJUnitResultFormatter">
<isset property="tests.sequential"/>
</condition>
<property name="junit.parallel.selector" value="org.apache.lucene.util.LuceneJUnitDividingSelector"/>
<property name="manifest.file" location="${build.dir}/MANIFEST.MF"/>
@ -263,7 +252,15 @@
</target>
<target name="init" depends="resolve">
<!-- currently empty -->
<!-- JUnit4 taskdef. -->
<taskdef resource="com/carrotsearch/junit4/antlib.xml">
<classpath>
<fileset dir="${common.dir}/test-framework/lib">
<include name="junit4-ant-*.jar" />
<include name="junit-*.jar" />
</fileset>
</classpath>
</taskdef>
</target>
<target name="resolve" depends="ivy-availability-check,ivy-fail">
@ -271,6 +268,7 @@
only special cases need bundles -->
<ivy:retrieve type="jar,bundle" log="download-only"/>
</target>
<target name="ivy-availability-check" unless="ivy.available">
<echo>
This build requires Ivy and Ivy could not be found in your ant classpath.
@ -622,152 +620,332 @@
</copy>
</sequential>
</macrodef>
<target name="test-updatecache" description="Overwrite tests' timings cache for balancing." depends="init">
<mergehints file="${common.dir}/tools/junit4/cached-timehints.txt">
<resources>
<!-- The order is important. Include previous stats first, then append new stats. -->
<fileset dir="${common.dir}/tools/junit4">
<include name="*.txt" />
</fileset>
<fileset dir="${common.dir}/..">
<include name="**/tests-timehints.txt" />
<exclude name="lucene/tools/junit4/**" />
</fileset>
</resources>
</mergehints>
</target>
<!-- Aliases for test filters -->
<condition property="tests.class" value="*.${testcase}">
<isset property="testcase" />
</condition>
<condition property="tests.method" value="${testmethod}">
<isset property="testmethod" />
</condition>
<!-- Test macro using junit4. -->
<macrodef name="test-macro" description="Executes junit tests.">
<attribute name="junit.output.dir" default="${junit.output.dir}"/>
<attribute name="junit.classpath" default="junit.classpath"/>
<attribute name="dataDir" default="${tests.src.dir}"/>
<attribute name="tempDir" default="${build.dir}/test"/>
<attribute name="threadNum" default="1"/>
<attribute name="threadTotal" default="1"/>
<attribute name="junit.output.dir" default="${junit.output.dir}"/>
<attribute name="junit.classpath" default="junit.classpath"/>
<attribute name="testsDir" default="${build.dir}/classes/test"/>
<attribute name="tempDir" default="${build.dir}/test"/>
<attribute name="threadNum" default="1"/>
<attribute name="tests.nightly" default="${tests.nightly}"/>
<attribute name="tests.weekly" default="${tests.weekly}"/>
<attribute name="tests.slow" default="${tests.slow}"/>
<attribute name="tests.multiplier" default="${tests.multiplier}"/>
<sequential>
<condition property="runall">
<not><or>
<isset property="testcase" />
<isset property="testpackage" />
<isset property="testpackageroot" />
</or></not>
</condition>
<!-- <mkdir dir="@{tempDir}/@{pattern}"/>
This is very loud and obnoxious. abuse touch instead for a "quiet" mkdir
-->
<touch file="@{tempDir}/@{threadNum}/quiet.ant" verbose="false" mkdirs="true"/>
<junit printsummary="off" haltonfailure="no" maxmemory="${tests.heapsize}" tempdir="@{tempDir}/@{threadNum}"
errorProperty="tests.failed" failureProperty="tests.failed" forkmode="perBatch" dir="@{tempDir}/@{threadNum}"
filtertrace="false">
<classpath refid="@{junit.classpath}"/>
<assertions>
<enable package="org.apache.lucene"/>
<enable package="org.apache.solr"/>
</assertions>
<jvmarg line="${args}"/>
<!-- allow tests to control debug prints -->
<sysproperty key="tests.verbose" value="${tests.verbose}"/>
<!-- even more debugging -->
<sysproperty key="tests.infostream" value="${tests.infostream}"/>
<!-- directory for formatter lock -->
<sysproperty key="tests.lockdir" value="${tests.lockdir}"/>
<!-- set the codec tests should run with -->
<sysproperty key="tests.codec" value="${tests.codec}"/>
<!-- set the postingsformat tests should run with -->
<sysproperty key="tests.postingsformat" value="${tests.postingsformat}"/>
<!-- set the locale tests should run with -->
<sysproperty key="tests.locale" value="${tests.locale}"/>
<!-- set the timezone tests should run with -->
<sysproperty key="tests.timezone" value="${tests.timezone}"/>
<!-- set the directory tests should run with -->
<sysproperty key="tests.directory" value="${tests.directory}"/>
<!-- set the line file source for oal.util.LineFileDocs -->
<sysproperty key="tests.linedocsfile" value="${tests.linedocsfile}"/>
<!-- set the number of times tests should run -->
<sysproperty key="tests.iter" value="${tests.iter}"/>
<!-- set the minimum number of times tests should run unless failure -->
<sysproperty key="tests.iter.min" value="${tests.iter.min}"/>
<!-- set the test seed -->
<sysproperty key="tests.seed" value="${tests.seed}"/>
<!-- set the Version that tests should run against -->
<sysproperty key="tests.luceneMatchVersion" value="${tests.luceneMatchVersion}"/>
<!-- for lucene we can be strict, and we don't want false fails even across methods -->
<sysproperty key="tests.cleanthreads" value="${tests.cleanthreads.sysprop}"/>
<!-- logging config file -->
<sysproperty key="java.util.logging.config.file" value="${tests.loggingfile}"/>
<!-- set whether or not nightly tests should run -->
<sysproperty key="tests.nightly" value="@{tests.nightly}"/>
<!-- set whether or not weekly tests should run -->
<sysproperty key="tests.weekly" value="@{tests.weekly}"/>
<!-- set whether or not slow tests should run -->
<sysproperty key="tests.slow" value="@{tests.slow}"/>
<!-- set whether tests framework should not require java assertions enabled -->
<sysproperty key="tests.asserts.gracious" value="${tests.asserts.gracious}"/>
<!-- Warn if somebody uses removed properties. -->
<fail message="This property has been removed: tests.iter, use -Dtests.iters=N.">
<condition>
<isset property="tests.iter" />
</condition>
</fail>
<!-- TODO: create propertyset for test properties, so each project can have its own set -->
<sysproperty key="tests.multiplier" value="@{tests.multiplier}"/>
<sysproperty key="tempDir" file="@{tempDir}/@{threadNum}"/>
<!-- Defaults. -->
<property name="tests.class" value="" />
<property name="tests.method" value="" />
<property name="tests.dynamicAssignmentRatio" value="0.25" /> <!-- 25% of suites -->
<property name="tests.haltonfailure" value="true" />
<property name="tests.iters" value="" />
<sysproperty key="lucene.version" value="${dev.version}"/>
<junit4
dir="@{tempDir}"
tempdir="@{tempDir}"
maxmemory="${tests.heapsize}"
parallelism="@{threadNum}"
printSummary="true"
haltonfailure="${tests.haltonfailure}"
failureProperty="tests.failed"
dynamicAssignmentRatio="${tests.dynamicAssignmentRatio}"
shuffleOnSlave="true"
leaveTemporary="false"
seed="${tests.seed}"
>
<!-- Classpaths. -->
<classpath refid="@{junit.classpath}"/>
<!-- Assertions. -->
<assertions>
<enable package="org.apache.lucene"/>
<enable package="org.apache.solr"/>
</assertions>
<!-- JVM arguments and system properties. -->
<jvmarg line="${args}"/>
<!-- set the number of times tests should run -->
<sysproperty key="tests.iters" value="${tests.iters}"/>
<!-- allow tests to control debug prints -->
<sysproperty key="tests.verbose" value="${tests.verbose}"/>
<!-- even more debugging -->
<sysproperty key="tests.infostream" value="${tests.infostream}"/>
<!-- directory for formatter lock -->
<sysproperty key="tests.lockdir" value="${tests.lockdir}"/>
<!-- set the codec tests should run with -->
<sysproperty key="tests.codec" value="${tests.codec}"/>
<!-- set the postingsformat tests should run with -->
<sysproperty key="tests.postingsformat" value="${tests.postingsformat}"/>
<!-- set the locale tests should run with -->
<sysproperty key="tests.locale" value="${tests.locale}"/>
<!-- set the timezone tests should run with -->
<sysproperty key="tests.timezone" value="${tests.timezone}"/>
<!-- set the directory tests should run with -->
<sysproperty key="tests.directory" value="${tests.directory}"/>
<!-- set the line file source for oal.util.LineFileDocs -->
<sysproperty key="tests.linedocsfile" value="${tests.linedocsfile}"/>
<!-- set the Version that tests should run against -->
<sysproperty key="tests.luceneMatchVersion" value="${tests.luceneMatchVersion}"/>
<!-- for lucene we can be strict, and we don't want false fails even across methods -->
<sysproperty key="tests.cleanthreads" value="${tests.cleanthreads.sysprop}"/>
<!-- logging config file -->
<sysproperty key="java.util.logging.config.file" value="${tests.loggingfile}"/>
<!-- set whether or not nightly tests should run -->
<sysproperty key="tests.nightly" value="@{tests.nightly}"/>
<!-- set whether or not weekly tests should run -->
<sysproperty key="tests.weekly" value="@{tests.weekly}"/>
<!-- set whether or not slow tests should run -->
<sysproperty key="tests.slow" value="@{tests.slow}"/>
<sysproperty key="testmethod" value="${testmethod}"/>
<sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="jetty.insecurerandom" value="1"/>
<sysproperty key="solr.directoryFactory" value="org.apache.solr.core.MockDirectoryFactory"/>
<formatter type="xml"/>
<formatter classname="${junit.details.formatter}" usefile="false"/>
<batchtest fork="yes" todir="@{junit.output.dir}" if="runall">
<fileset dir="@{dataDir}" includes="**/Test*.java,**/*Test.java" excludes="${junit.excludes}">
<custom classname="${junit.parallel.selector}" classpathref="@{junit.classpath}">
<param name="divisor" value="@{threadTotal}" />
<param name="part" value="@{threadNum}" />
</custom>
</fileset>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testpackage">
<fileset dir="@{dataDir}" includes="**/${testpackage}/**/Test*.java,**/${testpackage}/**/*Test.java" excludes="${junit.excludes}">
<custom classname="${junit.parallel.selector}" classpathref="@{junit.classpath}">
<param name="divisor" value="@{threadTotal}" />
<param name="part" value="@{threadNum}" />
</custom>
</fileset>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testpackageroot">
<fileset dir="@{dataDir}" includes="**/${testpackageroot}/Test*.java,**/${testpackageroot}/*Test.java" excludes="${junit.excludes}">
<custom classname="${junit.parallel.selector}" classpathref="@{junit.classpath}">
<param name="divisor" value="@{threadTotal}" />
<param name="part" value="@{threadNum}" />
</custom>
</fileset>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testcase">
<fileset dir="@{dataDir}" includes="**/${testcase}.java"/>
</batchtest>
</junit>
<fail if="tests.failed">Tests failed!</fail>
</sequential>
<!-- set whether the test framework should not require java assertions to be enabled -->
<sysproperty key="tests.asserts.gracious" value="${tests.asserts.gracious}"/>
<!-- TODO: create propertyset for test properties, so each project can have its own set -->
<sysproperty key="tests.multiplier" value="@{tests.multiplier}"/>
<!-- Temporary directory in the cwd. -->
<sysproperty key="tempDir" value="."/>
<sysproperty key="lucene.version" value="${dev.version}"/>
<sysproperty key="jetty.testMode" value="1"/>
<sysproperty key="jetty.insecurerandom" value="1"/>
<sysproperty key="solr.directoryFactory" value="org.apache.solr.core.MockDirectoryFactory"/>
<!-- Use static cached test balancing statistics. -->
<balancers>
<execution-times>
<fileset dir="${common.dir}/tools/junit4" includes="**/*.txt" />
</execution-times>
</balancers>
<!-- Reporting listeners. -->
<listeners>
<!-- A simplified console output (maven-like). -->
<report-text
showThrowable="true"
showStackTraces="true"
showOutputStream="true"
showErrorStream="true"
showStatusOk="false"
showStatusError="true"
showStatusFailure="true"
showStatusIgnored="true"
showSuiteSummary="true"
/>
<!-- Emits full status for all tests and their relative order on slaves. -->
<report-text
file="${junit.output.dir}/tests-report.txt"
showThrowable="true"
showStackTraces="true"
showOutputStream="true"
showErrorStream="true"
showStatusOk="true"
showStatusError="true"
showStatusFailure="true"
showStatusIgnored="true"
showSuiteSummary="true"
/>
<!-- Emits status on errors and failures only. -->
<report-text
file="${junit.output.dir}/tests-failures.txt"
showThrowable="true"
showStackTraces="true"
showOutputStream="true"
showErrorStream="true"
showStatusOk="false"
showStatusError="true"
showStatusFailure="true"
showStatusIgnored="false"
showSuiteSummary="false"
/>
<!-- Emits information about test timings (can be used to find
the slowest tests, or reused for balancing). -->
<report-execution-times file="${junit.output.dir}/tests-timehints.txt" historyLength="5" />
<!-- XML reports compatible with ant-report task. We don't use ant-report anyway so I omit these. -->
<!--
<report-ant-xml dir="@{junit.output.dir}" />
-->
<report-json file="${junit.output.dir}/tests-report-${ant.project.name}/index.html" />
</listeners>
<!-- Input test classes. -->
<fileset dir="@{testsDir}">
<include name="**/Test*.class" />
<include name="**/*Test.class" />
<exclude name="**/*$*" />
</fileset>
</junit4>
<!-- Report the 5 slowest tests from this run to the console. -->
<echo>5 slowest tests:</echo>
<tophints max="5">
<file file="${junit.output.dir}/tests-timehints.txt" />
</tophints>
</sequential>
</macrodef>
<target name="test" depends="compile-test,validate,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>
<target name="test-times" description="Show the slowest tests (averages)." depends="init">
<property name="max" value="10" />
<echo>Showing ${max} slowest tests according to local stats. (change with -Dmax=...).</echo>
<tophints max="${max}">
<fileset dir="${basedir}" includes="**/tests-timehints.txt" />
</tophints>
<target name="junit-mkdir">
<echo>Showing ${max} slowest tests in cached stats. (change with -Dmax=...).</echo>
<tophints max="${max}">
<fileset dir="${common.dir}/tools/junit4">
<include name="*.txt" />
</fileset>
</tophints>
</target>
<target name="test-help" description="Help on 'ant test' syntax.">
<echo>
#
# Test case filtering. --------------------------------------------
#
# - 'tests.class' is a class-filtering shell-like glob pattern,
# 'testcase' is an alias of "tests.class=*.${testcase}"
# - 'tests.method' is a method-filtering glob pattern.
# 'testmethod' is an alias of "tests.method=${testmethod}"
#
# Run a single test case (variants)
ant test -Dtests.class=org.apache.lucene.package.ClassName
ant test "-Dtests.class=*.ClassName"
ant test -Dtestcase=ClassName
# Run all tests in a package and sub-packages
ant test "-Dtests.class=org.apache.lucene.package.*"
# Run any test methods that contain 'esi' (like: ...r*esi*ze...).
ant test "-Dtests.method=*esi*"
#
# Seed and repetitions. -------------------------------------------
#
# Run with a given seed (seed is a hex-encoded long).
ant test -Dtests.seed=DEADBEEF
# Repeats a given test N times (note filters).
ant test -Dtests.iters=N -Dtestcase=ClassName -Dtestmethod=mytest
# Repeats _all_ tests of ClassName N times. Every test repetition
# will have a different seed.
ant test -Dtests.iters=N -Dtestcase=ClassName
# Repeats _all_ tests of ClassName N times. Every test repetition
# will have exactly the same master (dead) and method-level (beef)
# seed.
ant test -Dtests.iters=N -Dtestcase=ClassName -Dtests.seed=dead:beef
#
# Load balancing and caches. --------------------------------------
#
# Run sequentially (one slave JVM).
ant -Dtests.threadspercpu=1 test
# Run with more slave JVMs than the default.
# Don't count hypercores for CPU-intense tests.
# Make sure there is enough RAM to handle child JVMs.
ant -Dtests.threadspercpu=8 test
# Use repeatable suite order on slave JVMs (disables job stealing).
ant -Dtests.dynamicAssignmentRatio=0 test
# Update global (versioned!) execution times cache (top level).
ant clean test
ant -f lucene/build.xml test-updatecache
#
# Miscellaneous. --------------------------------------------------
#
# Run all tests without stopping on errors (inspect log files!).
ant -Dtests.haltonfailure=false test
# Run more verbose output (slave JVM parameters, etc.).
ant -verbose test
# Display local averaged stats, if any (30 slowest tests).
ant test-times -Dmax=30
# Output test files and reports.
${tests-output}/tests-report.txt - full ASCII tests report
${tests-output}/tests-failures.txt - failures only (if any)
${tests-output}/tests-timehints.txt - execution times (see above)
${tests-output}/tests-report-* - HTML5 report with results
${tests-output}/junit4-*.suites - per-JVM executed suites
(important when job stealing is enabled).
</echo>
</target>
<target name="test" depends="compile-test,validate" description="Runs unit tests">
<mkdir dir="${junit.output.dir}"/>
<test-macro threadNum="${tests.threadspercpu}" />
</target>
<target name="junit-sequential" if="tests.sequential">
<test-macro/>
<target name="refresh-checksums">
<checksum algorithm="SHA1">
<fileset dir="${basedir}">
<include name="**/*.jar"/>
</fileset>
</checksum>
</target>
<target name="junit-parallel" unless="tests.sequential">
<parallel threadsPerProcessor="${tests.threadspercpu}">
<test-macro threadNum="1" threadTotal="4"/>
<test-macro threadNum="2" threadTotal="4"/>
<test-macro threadNum="3" threadTotal="4"/>
<test-macro threadNum="4" threadTotal="4"/>
</parallel>
</target>
<!--
If you want clover test code coverage, run this before the tests. You need clover.jar and the license in your ANT classpath and you need to specify -Drun.clover=true on the command line.
See http://issues.apache.org/jira/browse/LUCENE-721
-->
<target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover. Requires a Clover 2.x license and clover.jar in the ANT classpath. To use, specify -Drun.clover=true on the command line."/>
<target name="clover.setup" if="clover.enabled">


@ -56,7 +56,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox jumped";
final Directory directory = newDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
try {
final Document document = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
@ -98,7 +98,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox jumped";
final Directory directory = newDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
try {
final Document document = new Document();
@ -169,7 +169,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = newDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
try {
final Document document = new Document();
@ -212,7 +212,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = newDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
try {
final Document document = new Document();
@ -252,7 +252,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = newDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
try {
final Document document = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);


@ -70,7 +70,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
Directory ramDir;
public IndexSearcher searcher = null;
int numHighlights = 0;
final Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
Analyzer analyzer;
TopDocs hits;
String[] texts = {
@ -79,9 +79,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
"JFK has been shot", "John Kennedy has been shot",
"This text has a typo in referring to Keneddy",
"wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };
public void testQueryScorerHits() throws Exception {
Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
PhraseQuery phraseQuery = new PhraseQuery();
phraseQuery.add(new Term(FIELD_NAME, "very"));
@ -153,9 +153,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
* This method is intended for use with <tt>testHighlightingWithDefaultField()</tt>
* @throws InvalidTokenOffsetsException
*/
private static String highlightField(Query query, String fieldName, String text)
private String highlightField(Query query, String fieldName, String text)
throws IOException, InvalidTokenOffsetsException {
TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)
TokenStream tokenStream = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)
.tokenStream(fieldName, new StringReader(text));
// Assuming "<B>", "</B>" used to highlight
SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
@ -234,7 +234,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
Highlighter h = new Highlighter(this, scorer);
Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
h.getBestFragment(analyzer, f1, content);
@ -1166,7 +1166,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
public void testMaxSizeHighlight() throws Exception {
final MockAnalyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
final MockAnalyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
// we disable MockTokenizer checks because we will forcefully limit the
// tokenstream and call end() before incrementToken() returns false.
analyzer.setEnableChecks(false);
@ -1201,7 +1201,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
CharacterRunAutomaton stopWords = new CharacterRunAutomaton(BasicAutomata.makeString("stoppedtoken"));
// we disable MockTokenizer checks because we will forcefully limit the
// tokenstream and call end() before incrementToken() returns false.
final MockAnalyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true);
final MockAnalyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopWords, true);
analyzer.setEnableChecks(false);
TermQuery query = new TermQuery(new Term("data", goodWord));
@ -1252,7 +1252,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
Highlighter hg = getHighlighter(query, "text", fm);
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(36);
String match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
String match = hg.getBestFragment(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
assertTrue(
"Matched text should contain remainder of text after highlighted query ",
match.endsWith("in it"));
@ -1269,7 +1269,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
numHighlights = 0;
// test to show how rewritten query can still be used
searcher = new IndexSearcher(reader);
Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
BooleanQuery query = new BooleanQuery();
query.add(new WildcardQuery(new Term(FIELD_NAME, "jf?")), Occur.SHOULD);
@ -1611,7 +1611,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private Directory dir;
private Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
private Analyzer a;
public void testWeightedTermsWithDeletes() throws IOException, InvalidTokenOffsetsException {
makeIndex();
@ -1626,7 +1626,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void makeIndex() throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@ -1636,7 +1636,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void deleteDocument() throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep comment the following line
//writer.forceMerge(1);
@ -1728,10 +1728,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
@Override
public void setUp() throws Exception {
super.setUp();
a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
dir = newDirectory();
ramDir = newDirectory();
IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
for (String text : texts) {
addDoc(writer, text);
}


@ -88,9 +88,9 @@ public class HighlightCustomQueryTest extends LuceneTestCase {
*
* @throws InvalidTokenOffsetsException
*/
private static String highlightField(Query query, String fieldName,
private String highlightField(Query query, String fieldName,
String text) throws IOException, InvalidTokenOffsetsException {
TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE,
TokenStream tokenStream = new MockAnalyzer(random(), MockTokenizer.SIMPLE,
true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName,
new StringReader(text));
// Assuming "<B>", "</B>" used to highlight


@ -84,9 +84,9 @@ public abstract class AbstractTestCase extends LuceneTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
analyzerW = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
analyzerW = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
analyzerB = new BigramAnalyzer();
analyzerK = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
analyzerK = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
dir = newDirectory();
}


@ -108,14 +108,14 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
StringBuilder termField = new StringBuilder();
// add up to 250 terms to field "foo"
final int numFooTerms = random.nextInt(250 * RANDOM_MULTIPLIER);
final int numFooTerms = random().nextInt(250 * RANDOM_MULTIPLIER);
for (int i = 0; i < numFooTerms; i++) {
fooField.append(" ");
fooField.append(randomTerm());
}
// add up to 250 terms to field "term"
final int numTermTerms = random.nextInt(250 * RANDOM_MULTIPLIER);
final int numTermTerms = random().nextInt(250 * RANDOM_MULTIPLIER);
for (int i = 0; i < numTermTerms; i++) {
termField.append(" ");
termField.append(randomTerm());
@ -170,10 +170,10 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
* Return a random analyzer (Simple, Stop, Standard) to analyze the terms.
*/
private Analyzer randomAnalyzer() {
switch(random.nextInt(3)) {
case 0: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
case 1: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
default: return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
switch(random().nextInt(3)) {
case 0: return new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
case 1: return new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
default: return new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
}
}
@ -192,21 +192,21 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
* the other half of the time, returns a random unicode string.
*/
private String randomTerm() {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
// return a random TEST_TERM
return TEST_TERMS[random.nextInt(TEST_TERMS.length)];
return TEST_TERMS[random().nextInt(TEST_TERMS.length)];
} else {
// return a random unicode term
return _TestUtil.randomUnicodeString(random);
return _TestUtil.randomUnicodeString(random());
}
}
public void testDocsEnumStart() throws Exception {
Analyzer analyzer = new MockAnalyzer(random);
Analyzer analyzer = new MockAnalyzer(random());
MemoryIndex memory = new MemoryIndex();
memory.addField("foo", "bar", analyzer);
AtomicReader reader = (AtomicReader) memory.createSearcher().getIndexReader();
DocsEnum disi = _TestUtil.docs(random, reader, "foo", new BytesRef("bar"), null, null, false);
DocsEnum disi = _TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, false);
int docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -222,7 +222,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
}
public void testDocsAndPositionsEnumStart() throws Exception {
Analyzer analyzer = new MockAnalyzer(random);
Analyzer analyzer = new MockAnalyzer(random());
MemoryIndex memory = new MemoryIndex(true);
memory.addField("foo", "bar", analyzer);
AtomicReader reader = (AtomicReader) memory.createSearcher().getIndexReader();
@ -250,7 +250,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<RegexpQuery>(regex);
MemoryIndex mindex = new MemoryIndex();
mindex.addField("field", new MockAnalyzer(random).tokenStream("field", new StringReader("hello there")));
mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", new StringReader("hello there")));
// This throws an NPE
assertEquals(0, mindex.search(wrappedquery), 0.00001f);
@ -262,7 +262,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<RegexpQuery>(regex));
MemoryIndex mindex = new MemoryIndex();
mindex.addField("field", new MockAnalyzer(random).tokenStream("field", new StringReader("hello there")));
mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", new StringReader("hello there")));
// This passes though
assertEquals(0, mindex.search(wrappedquery), 0.00001f);


@ -39,7 +39,7 @@ public class TestIndexSplitter extends LuceneTestCase {
mergePolicy.setNoCFSRatio(1);
IndexWriter iw = new IndexWriter(
fsDir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMergePolicy(mergePolicy)
);


@ -33,7 +33,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
Document doc;
for (int i = 0; i < NUM_DOCS; i++) {
doc = new Document();


@ -36,7 +36,7 @@ public class TestPKIndexSplitter extends LuceneTestCase {
NumberFormat format = new DecimalFormat("000000000");
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setOpenMode(OpenMode.CREATE).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
for (int x = 0; x < 11; x++) {
Document doc = createDocument(x, "1", 3, format);
@ -56,7 +56,7 @@ public class TestPKIndexSplitter extends LuceneTestCase {
// delete some documents
w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setOpenMode(OpenMode.APPEND).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
w.deleteDocuments(midTerm);
w.deleteDocuments(new Term("id", format.format(2)));
@ -71,8 +71,8 @@ public class TestPKIndexSplitter extends LuceneTestCase {
Directory dir1 = newDirectory();
Directory dir2 = newDirectory();
PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)),
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())),
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
splitter.split();
IndexReader ir1 = IndexReader.open(dir1);


@ -17,6 +17,8 @@ package org.apache.lucene.misc;
* limitations under the License.
*/
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
@ -39,8 +41,8 @@ public class TestHighFreqTerms extends LuceneTestCase {
@BeforeClass
public static void setUpClass() throws Exception {
dir = newDirectory();
writer = new IndexWriter(dir, newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
writer = new IndexWriter(dir, newIndexWriterConfig(random(),
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2));
indexDocs(writer);
reader = IndexReader.open(dir);
@ -194,7 +196,8 @@ public class TestHighFreqTerms extends LuceneTestCase {
/********************Testing Utils**********************************/
private static void indexDocs(IndexWriter writer) throws Exception {
Random rnd = random();
/**
* Generate 10 documents where term n has a docFreq of n and a totalTermFreq of n^2 (squared).
*/
@ -202,9 +205,9 @@ public class TestHighFreqTerms extends LuceneTestCase {
Document doc = new Document();
String content = getContent(i);
doc.add(newField(random, "FIELD_1", content, TextField.TYPE_STORED));
doc.add(newField(rnd, "FIELD_1", content, TextField.TYPE_STORED));
//add a different field
doc.add(newField(random, "different_field", "diff", TextField.TYPE_STORED));
doc.add(newField(rnd, "different_field", "diff", TextField.TYPE_STORED));
writer.addDocument(doc);
}
@ -212,7 +215,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
//highest freq terms for a specific field.
for (int i = 1; i <= 10; i++) {
Document doc = new Document();
doc.add(newField(random, "different_field", "diff", TextField.TYPE_STORED));
doc.add(newField(rnd, "different_field", "diff", TextField.TYPE_STORED));
writer.addDocument(doc);
}
// add some docs where tf < df so we can see if sorting works
@ -223,7 +226,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
for (int i = 0; i < highTF; i++) {
content += "highTF ";
}
doc.add(newField(random, "FIELD_1", content, TextField.TYPE_STORED));
doc.add(newField(rnd, "FIELD_1", content, TextField.TYPE_STORED));
writer.addDocument(doc);
// highTF medium df =5
int medium_df = 5;
@ -234,7 +237,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
for (int j = 0; j < tf; j++) {
newcontent += "highTFmedDF ";
}
newdoc.add(newField(random, "FIELD_1", newcontent, TextField.TYPE_STORED));
newdoc.add(newField(rnd, "FIELD_1", newcontent, TextField.TYPE_STORED));
writer.addDocument(newdoc);
}
// add a doc with high tf in field different_field
@ -244,7 +247,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
for (int i = 0; i < targetTF; i++) {
content += "TF150 ";
}
doc.add(newField(random, "different_field", content, TextField.TYPE_STORED));
doc.add(newField(rnd, "different_field", content, TextField.TYPE_STORED));
writer.addDocument(doc);
writer.close();


@ -45,7 +45,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
//Add series of docs with filterable fields : url, text and dates flags
addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
@ -135,7 +135,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
for (ScoreDoc hit : hits) {
Document d = searcher.doc(hit.doc);
String url = d.get(KEY_FIELD);
DocsEnum td = _TestUtil.docs(random, reader,
DocsEnum td = _TestUtil.docs(random(), reader,
KEY_FIELD,
new BytesRef(url),
MultiFields.getLiveDocs(reader),
@ -159,7 +159,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
for (ScoreDoc hit : hits) {
Document d = searcher.doc(hit.doc);
String url = d.get(KEY_FIELD);
DocsEnum td = _TestUtil.docs(random, reader,
DocsEnum td = _TestUtil.docs(random(), reader,
KEY_FIELD,
new BytesRef(url),
MultiFields.getLiveDocs(reader),


@ -38,13 +38,15 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
private Directory directory;
private IndexSearcher searcher;
private IndexReader reader;
private Analyzer analyzer = new MockAnalyzer(random);
private Analyzer analyzer;
@Override
public void setUp() throws Exception {
super.setUp();
analyzer = new MockAnalyzer(random());
directory = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
//Add series of docs with misspelt names
addDoc(writer, "jonathon smythe", "1");
@ -122,7 +124,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
}
public void testFuzzyLikeThisQueryEquals() {
Analyzer analyzer = new MockAnalyzer(random);
Analyzer analyzer = new MockAnalyzer(random());
FuzzyLikeThisQuery fltq1 = new FuzzyLikeThisQuery(10, analyzer);
fltq1.addTerms("javi", "subject", 0.5f, 2);
FuzzyLikeThisQuery fltq2 = new FuzzyLikeThisQuery(10, analyzer);


@ -47,22 +47,22 @@ public class TestSlowCollationMethods extends LuceneTestCase {
@BeforeClass
public static void beforeClass() throws Exception {
final Locale locale = LuceneTestCase.randomLocale(random);
final Locale locale = LuceneTestCase.randomLocale(random());
collator = Collator.getInstance(locale);
collator.setStrength(Collator.IDENTICAL);
collator.setDecomposition(Collator.NO_DECOMPOSITION);
numDocs = 1000 * RANDOM_MULTIPLIER;
dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random, dir);
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
String value = _TestUtil.randomUnicodeString(random);
String value = _TestUtil.randomUnicodeString(random());
Field field = newField("field", value, StringField.TYPE_STORED);
doc.add(field);
iw.addDocument(doc);
}
splitDoc = _TestUtil.randomUnicodeString(random);
splitDoc = _TestUtil.randomUnicodeString(random());
reader = iw.getReader();
iw.close();
@ -97,13 +97,13 @@ public class TestSlowCollationMethods extends LuceneTestCase {
});
final Sort sort = new Sort(sf);
final TopDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random.nextInt(4)), sort);
final TopDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random().nextInt(4)), sort);
doCheckSorting(docs1);
final TopDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random.nextInt(4)), sort);
final TopDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random().nextInt(4)), sort);
doCheckSorting(docs2);
final TopDocs docs = TopDocs.merge(sort, numDocs/(1+random.nextInt(4)), new TopDocs[]{docs1, docs2});
final TopDocs docs = TopDocs.merge(sort, numDocs/(1+random().nextInt(4)), new TopDocs[]{docs1, docs2});
doCheckSorting(docs);
}
@ -130,8 +130,8 @@ public class TestSlowCollationMethods extends LuceneTestCase {
public void testRangeQuery() throws Exception {
int numQueries = 50*RANDOM_MULTIPLIER;
for (int i = 0; i < numQueries; i++) {
String startPoint = _TestUtil.randomUnicodeString(random);
String endPoint = _TestUtil.randomUnicodeString(random);
String startPoint = _TestUtil.randomUnicodeString(random());
String endPoint = _TestUtil.randomUnicodeString(random());
Query query = new SlowCollatedTermRangeQuery("field", startPoint, endPoint, true, true, collator);
doTestRanges(startPoint, endPoint, query);
}
@ -140,8 +140,8 @@ public class TestSlowCollationMethods extends LuceneTestCase {
public void testRangeFilter() throws Exception {
int numQueries = 50*RANDOM_MULTIPLIER;
for (int i = 0; i < numQueries; i++) {
String startPoint = _TestUtil.randomUnicodeString(random);
String endPoint = _TestUtil.randomUnicodeString(random);
String startPoint = _TestUtil.randomUnicodeString(random());
String endPoint = _TestUtil.randomUnicodeString(random());
Query query = new ConstantScoreQuery(new SlowCollatedTermRangeFilter("field", startPoint, endPoint, true, true, collator));
doTestRanges(startPoint, endPoint, query);
}


@ -45,7 +45,7 @@ public class TestRegexQuery extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, directory);
RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
Document doc = new Document();
doc.add(newField(FN, "the quick brown fox jumps over the lazy dog", TextField.TYPE_UNSTORED));
writer.addDocument(doc);


@ -58,7 +58,7 @@ public class TestSpanRegexQuery extends LuceneTestCase {
public void testSpanRegex() throws Exception {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
// doc.add(newField("field", "the quick brown fox jumps over the lazy dog",
// Field.Store.NO, Field.Index.ANALYZED));


@ -39,13 +39,13 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestDemo extends LuceneTestCase {
public void testDemo() throws IOException {
Analyzer analyzer = new MockAnalyzer(random);
Analyzer analyzer = new MockAnalyzer(random());
// Store the index in memory:
Directory directory = newDirectory();
// To store an index on disk, use this instead:
//Directory directory = FSDirectory.open("/tmp/testindex");
RandomIndexWriter iwriter = new RandomIndexWriter(random, directory, analyzer);
RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, analyzer);
Document doc = new Document();
String longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
String text = "This is the text to be indexed. " + longTerm;


@ -63,7 +63,7 @@ public class TestExternalCodecs extends LuceneTestCase {
dir.setCheckIndexOnClose(false); // we use a custom codec provider
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setCodec(new CustomPerFieldCodec()).
setMergePolicy(newLogMergePolicy(3))
);


@ -95,7 +95,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
doc.add(idField);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new MyMergeScheduler())
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new MyMergeScheduler())
.setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergePolicy(newLogMergePolicy()));
LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();


@ -46,7 +46,7 @@ public class TestSearch extends LuceneTestCase {
public void testSearch() throws Exception {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
doTestSearch(random, pw, false);
doTestSearch(random(), pw, false);
pw.close();
sw.close();
String multiFileOutput = sw.getBuffer().toString();
@ -54,7 +54,7 @@ public class TestSearch extends LuceneTestCase {
sw = new StringWriter();
pw = new PrintWriter(sw, true);
doTestSearch(random, pw, true);
doTestSearch(random(), pw, true);
pw.close();
sw.close();
String singleFileOutput = sw.getBuffer().toString();


@ -50,7 +50,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw, true);
final int MAX_DOCS = atLeast(225);
doTest(random, pw, false, MAX_DOCS);
doTest(random(), pw, false, MAX_DOCS);
pw.close();
sw.close();
String multiFileOutput = sw.getBuffer().toString();
@ -58,7 +58,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
sw = new StringWriter();
pw = new PrintWriter(sw, true);
doTest(random, pw, true, MAX_DOCS);
doTest(random(), pw, true, MAX_DOCS);
pw.close();
sw.close();
String singleFileOutput = sw.getBuffer().toString();


@ -37,7 +37,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
public void testCaching() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
TokenStream stream = new TokenStream() {
private int index = 0;


@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
@ -165,12 +166,12 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t2 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t);
return new TokenStreamComponents(t, t2);
}
};
checkAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
checkAnalysisConsistency(random(), a, false, "a b c d e f g h i j k");
}
}
@ -187,12 +188,12 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new GraphTokenizer(reader);
final TokenStream t2 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t);
return new TokenStreamComponents(t, t2);
}
};
checkAnalysisConsistency(random, a, false, "a/x:3 c/y:2 d e f/z:4 g h i j k");
checkAnalysisConsistency(random(), a, false, "a/x:3 c/y:2 d e f/z:4 g h i j k");
}
}
@ -249,12 +250,13 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t2 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t);
final TokenStream t3 = new RemoveATokens(t2);
return new TokenStreamComponents(t, t3);
}
};
Random random = random();
checkAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
checkAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
checkAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
@ -276,11 +278,12 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t2 = new RemoveATokens(t);
final TokenStream t3 = new MockGraphTokenFilter(random, t2);
final TokenStream t3 = new MockGraphTokenFilter(random(), t2);
return new TokenStreamComponents(t, t3);
}
};
Random random = random();
checkAnalysisConsistency(random, a, false, "a b c d e f g h i j k");
checkAnalysisConsistency(random, a, false, "x y a b c d e f g h i j k");
checkAnalysisConsistency(random, a, false, "a b c d e f g h i j k a");
@ -301,11 +304,12 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t2 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t);
return new TokenStreamComponents(t, t2);
}
};
Random random = random();
checkRandomData(random, a, 5, atLeast(1000));
}
}
@ -324,12 +328,13 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t1 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random, t1);
final TokenStream t1 = new MockGraphTokenFilter(random(), t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t1);
return new TokenStreamComponents(t, t2);
}
};
Random random = random();
checkRandomData(random, a, 5, atLeast(1000));
}
}
@ -347,12 +352,13 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t1 = new MockGraphTokenFilter(random, t);
final TokenStream t2 = new MockHoleInjectingTokenFilter(random, t1);
final TokenStream t1 = new MockGraphTokenFilter(random(), t);
final TokenStream t2 = new MockHoleInjectingTokenFilter(random(), t1);
return new TokenStreamComponents(t, t2);
}
};
Random random = random();
checkRandomData(random, a, 5, atLeast(1000));
}
}
@ -370,12 +376,13 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
final Tokenizer t = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
final TokenStream t1 = new MockHoleInjectingTokenFilter(random, t);
final TokenStream t2 = new MockGraphTokenFilter(random, t1);
final TokenStream t1 = new MockHoleInjectingTokenFilter(random(), t);
final TokenStream t2 = new MockGraphTokenFilter(random(), t1);
return new TokenStreamComponents(t, t2);
}
};
Random random = random();
checkRandomData(random, a, 5, atLeast(1000));
}
}
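
Where one test reuses a single Random across several calls, as in the hunks above, the change hoists Random random = random(); into a local once, so the repeated consistency checks draw from one continuing stream while the anonymous Analyzer's factory fetches its own instance via random(). A sketch of that hoisting pattern; checkOnce is a hypothetical stand-in for checkAnalysisConsistency:

import java.util.Random;

public class HoistingSketch {
  // Stand-in accessor; imagine it returning the current thread's Random.
  static Random random() {
    return new Random(7);
  }

  // Hypothetical stand-in for checkAnalysisConsistency(random, a, ...).
  static void checkOnce(Random r, String input) {
    System.out.println(input + " -> " + r.nextInt(1000));
  }

  public static void main(String[] args) {
    Random random = random(); // fetched once, reused for every check below
    checkOnce(random, "a b c d e f g h i j k");
    checkOnce(random, "x y a b c d e f g h i j k");
    checkOnce(random, "a b c d e f g h i j k a");
  }
}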


@ -19,6 +19,7 @@ package org.apache.lucene.analysis;
import java.io.IOException;
import java.io.Reader;
import java.util.Random;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -28,12 +29,13 @@ public class TestLookaheadTokenFilter extends BaseTokenStreamTestCase {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Random random = random();
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, random.nextBoolean());
TokenStream output = new MockRandomLookaheadTokenFilter(random, tokenizer);
return new TokenStreamComponents(tokenizer, output);
}
};
checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 8192);
}
private static class NeverPeeksLookaheadTokenFilter extends LookaheadTokenFilter<LookaheadTokenFilter.Position> {
@ -56,11 +58,11 @@ public class TestLookaheadTokenFilter extends BaseTokenStreamTestCase {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, random.nextBoolean());
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, random().nextBoolean());
TokenStream output = new NeverPeeksLookaheadTokenFilter(tokenizer);
return new TokenStreamComponents(tokenizer, output);
}
};
checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 8192);
}
}


@ -31,7 +31,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** Test a configuration that behaves a lot like WhitespaceAnalyzer */
public void testWhitespace() throws Exception {
Analyzer a = new MockAnalyzer(random);
Analyzer a = new MockAnalyzer(random());
assertAnalyzesTo(a, "A bc defg hiJklmn opqrstuv wxy z ",
new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
assertAnalyzesToReuse(a, "aba cadaba shazam",
@ -42,7 +42,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** Test a configuration that behaves a lot like SimpleAnalyzer */
public void testSimple() throws Exception {
Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
Analyzer a = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@ -53,7 +53,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** Test a configuration that behaves a lot like KeywordAnalyzer */
public void testKeyword() throws Exception {
Analyzer a = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
Analyzer a = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
new String[] { "a-bc123 defg+hijklmn567opqrstuv78wxy_z " });
assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@ -64,13 +64,13 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** Test a configuration that behaves a lot like StopAnalyzer */
public void testStop() throws Exception {
Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
Analyzer a = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
assertAnalyzesTo(a, "the quick brown a fox",
new String[] { "quick", "brown", "fox" },
new int[] { 2, 1, 2 });
// disable positions
a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
a = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
assertAnalyzesTo(a, "the quick brown a fox",
new String[] { "quick", "brown", "fox" },
new int[] { 1, 1, 1 });
@ -83,7 +83,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
BasicOperations.complement(
Automaton.union(
Arrays.asList(BasicAutomata.makeString("foo"), BasicAutomata.makeString("bar")))));
Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, keepWords, true);
Analyzer a = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, keepWords, true);
assertAnalyzesTo(a, "quick foo brown bar bar fox foo",
new String[] { "foo", "bar", "bar", "foo" },
new int[] { 2, 2, 1, 2 });
@ -92,7 +92,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** Test a configuration that behaves a lot like LengthFilter */
public void testLength() throws Exception {
CharacterRunAutomaton length5 = new CharacterRunAutomaton(new RegExp(".{5,}").toAutomaton());
Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, length5, true);
Analyzer a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, length5, true);
assertAnalyzesTo(a, "ok toolong fine notfine",
new String[] { "ok", "fine" },
new int[] { 1, 2 });
@ -101,7 +101,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
public void testLUCENE_3042() throws Exception {
String testString = "t";
Analyzer analyzer = new MockAnalyzer(random);
Analyzer analyzer = new MockAnalyzer(random());
TokenStream stream = analyzer.tokenStream("dummy", new StringReader(testString));
stream.reset();
while (stream.incrementToken()) {
@ -115,16 +115,16 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random, new MockAnalyzer(random), atLeast(1000));
checkRandomData(random(), new MockAnalyzer(random()), atLeast(1000));
}
public void testForwardOffsets() throws Exception {
int num = atLeast(10000);
for (int i = 0; i < num; i++) {
String s = _TestUtil.randomHtmlishString(random, 20);
String s = _TestUtil.randomHtmlishString(random(), 20);
StringReader reader = new StringReader(s);
MockCharFilter charfilter = new MockCharFilter(CharReader.get(reader), 2);
MockAnalyzer analyzer = new MockAnalyzer(random);
MockAnalyzer analyzer = new MockAnalyzer(random());
TokenStream ts = analyzer.tokenStream("bogus", charfilter);
ts.reset();
while (ts.incrementToken()) {


@ -108,8 +108,8 @@ public class TestAppendingCodec extends LuceneTestCase {
private static final String text = "the quick brown fox jumped over the lazy dog";
public void testCodec() throws Exception {
Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random));
Directory dir = new AppendingRAMDirectory(random(), new RAMDirectory());
IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random()));
cfg.setCodec(new AppendingCodec());
((TieredMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false);
@ -150,8 +150,8 @@ public class TestAppendingCodec extends LuceneTestCase {
}
public void testCompoundFile() throws Exception {
Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random));
Directory dir = new AppendingRAMDirectory(random(), new RAMDirectory());
IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random()));
TieredMergePolicy mp = new TieredMergePolicy();
mp.setUseCompoundFile(true);
mp.setNoCFSRatio(1.0);


@ -29,13 +29,13 @@ public class TestIntBlockCodec extends LuceneTestCase {
IntStreamFactory f = new MockFixedIntBlockPostingsFormat(128).getIntFactory();
IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random));
IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random()));
for(int i=0;i<11777;i++) {
out.write(i);
}
out.close();
IntIndexInput in = f.openInput(dir, "test", newIOContext(random));
IntIndexInput in = f.openInput(dir, "test", newIOContext(random()));
IntIndexInput.Reader r = in.reader();
for(int i=0;i<11777;i++) {
@ -50,12 +50,12 @@ public class TestIntBlockCodec extends LuceneTestCase {
Directory dir = newDirectory();
IntStreamFactory f = new MockFixedIntBlockPostingsFormat(128).getIntFactory();
IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random));
IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random()));
// write no ints
out.close();
IntIndexInput in = f.openInput(dir, "test", newIOContext(random));
IntIndexInput in = f.openInput(dir, "test", newIOContext(random()));
in.reader();
// read no ints
in.close();


@ -287,12 +287,12 @@ public class TestSurrogates extends LuceneTestCase {
@Test
public void testSurrogatesOrder() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random,
RandomIndexWriter w = new RandomIndexWriter(random(),
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setCodec(new PreFlexRWCodec()));
new MockAnalyzer(random())).setCodec(new PreFlexRWCodec()));
final int numField = _TestUtil.nextInt(random, 2, 5);
final int numField = _TestUtil.nextInt(random(), 2, 5);
int uniqueTermCount = 0;
@ -307,7 +307,7 @@ public class TestSurrogates extends LuceneTestCase {
final Set<String> uniqueTerms = new HashSet<String>();
for(int i=0;i<numTerms;i++) {
String term = getRandomString(random) + "_ " + (tc++);
String term = getRandomString(random()) + "_ " + (tc++);
uniqueTerms.add(term);
fieldTerms.add(new Term(field, term));
Document doc = new Document();
@ -346,8 +346,8 @@ public class TestSurrogates extends LuceneTestCase {
//assertNotNull(fields);
doTestStraightEnum(fieldTerms, reader, uniqueTermCount);
doTestSeekExists(random, fieldTerms, reader);
doTestSeekDoesNotExist(random, numField, fieldTerms, fieldTermsArray, reader);
doTestSeekExists(random(), fieldTerms, reader);
doTestSeekDoesNotExist(random(), numField, fieldTerms, fieldTermsArray, reader);
reader.close();
w.close();


@ -71,10 +71,10 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
public static void beforeClass() throws Exception {
LuceneTestCase.PREFLEX_IMPERSONATION_IS_ACTIVE = true;
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random, MockTokenizer.KEYWORD, false));
new MockAnalyzer(random(), MockTokenizer.KEYWORD, false));
termIndexInterval = config.getTermIndexInterval();
indexDivisor = _TestUtil.nextInt(random, 1, 10);
indexDivisor = _TestUtil.nextInt(random(), 1, 10);
NUMBER_OF_DOCUMENTS = atLeast(100);
NUMBER_OF_FIELDS = atLeast(Math.max(10, 3*termIndexInterval*indexDivisor/NUMBER_OF_DOCUMENTS));
@ -98,8 +98,8 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
FieldInfos fieldInfos = infosReader.read(directory, segment, IOContext.READONCE);
String segmentFileName = IndexFileNames.segmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION);
long tiiFileLength = directory.fileLength(segmentFileName);
IndexInput input = directory.openInput(segmentFileName, newIOContext(random));
termEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), newIOContext(random)), fieldInfos, false);
IndexInput input = directory.openInput(segmentFileName, newIOContext(random()));
termEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", Lucene3xPostingsFormat.TERMS_EXTENSION), newIOContext(random())), fieldInfos, false);
int totalIndexInterval = termEnum.indexInterval * indexDivisor;
SegmentTermEnum indexEnum = new SegmentTermEnum(input, fieldInfos, true);
@ -134,7 +134,7 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
}
public void testCompareTo() throws IOException {
Term term = new Term("field" + random.nextInt(NUMBER_OF_FIELDS) ,getText());
Term term = new Term("field" + random().nextInt(NUMBER_OF_FIELDS) ,getText());
for (int i = 0; i < index.length(); i++) {
Term t = index.getTerm(i);
int compareTo = term.compareTo(t);
@ -190,7 +190,7 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
}
private static void populate(Directory directory, IndexWriterConfig config) throws CorruptIndexException, LockObtainFailedException, IOException {
RandomIndexWriter writer = new RandomIndexWriter(random, directory, config);
RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config);
for (int i = 0; i < NUMBER_OF_DOCUMENTS; i++) {
Document document = new Document();
for (int f = 0; f < NUMBER_OF_FIELDS; f++) {
@ -201,8 +201,8 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
}
private static String getText() {
return Long.toString(random.nextLong(),Character.MAX_RADIX);
return Long.toString(random().nextLong(),Character.MAX_RADIX);
}
}
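
The helpers keep the same contract by taking the Random as an explicit parameter rather than reading a shared field, which is why the hunks above thread random() through _TestUtil.nextInt and newIOContext at each call site. A sketch of that shape; nextInt below mirrors the inclusive-bounds call sites above and is not copied from _TestUtil:

import java.util.Random;

public class ExplicitRandomSketch {
  // Same shape as _TestUtil.nextInt(Random, start, end) as called above;
  // bounds are assumed inclusive, matching calls like nextInt(random(), 2, 5).
  static int nextInt(Random r, int lo, int hi) {
    return lo + r.nextInt(hi - lo + 1);
  }

  public static void main(String[] args) {
    Random r = new Random(0);             // stand-in for random()
    int numField = nextInt(r, 2, 5);      // like the TestSurrogates hunk
    int indexDivisor = nextInt(r, 1, 10); // like the TestTermInfosReaderIndex hunk
    System.out.println(numField + " " + indexDivisor);
  }
}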


@ -139,7 +139,7 @@ public class TestBitVector extends LuceneTestCase
}
private void doTestWriteRead(int n) throws Exception {
MockDirectoryWrapper d = new MockDirectoryWrapper(random, new RAMDirectory());
MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new RAMDirectory());
d.setPreventDoubleWrite(false);
BitVector bv = new BitVector(n);
// test count when incrementally setting bits
@ -149,8 +149,8 @@ public class TestBitVector extends LuceneTestCase
bv.set(i);
assertTrue(bv.get(i));
assertEquals(i+1,bv.count());
bv.write(d, "TESTBV", newIOContext(random));
BitVector compare = new BitVector(d, "TESTBV", newIOContext(random));
bv.write(d, "TESTBV", newIOContext(random()));
BitVector compare = new BitVector(d, "TESTBV", newIOContext(random()));
// compare bit vectors with bits set incrementally
assertTrue(doCompare(bv,compare));
}
@ -168,7 +168,7 @@ public class TestBitVector extends LuceneTestCase
doTestDgaps(100000,415,418);
doTestDgaps(1000000,3123,3126);
// now exercise skipping of fully populated byte in the bitset (they are omitted if bitset is sparse)
MockDirectoryWrapper d = new MockDirectoryWrapper(random, new RAMDirectory());
MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new RAMDirectory());
d.setPreventDoubleWrite(false);
BitVector bv = new BitVector(10000);
bv.set(0);
@ -180,17 +180,17 @@ public class TestBitVector extends LuceneTestCase
} // get a second byte full of set bits
// add some more bits here
for (int i = 40; i < 10000; i++) {
if (random.nextInt(1000) == 0) {
if (random().nextInt(1000) == 0) {
bv.set(i);
}
}
bv.write(d, "TESTBV", newIOContext(random));
BitVector compare = new BitVector(d, "TESTBV", newIOContext(random));
bv.write(d, "TESTBV", newIOContext(random()));
BitVector compare = new BitVector(d, "TESTBV", newIOContext(random()));
assertTrue(doCompare(bv,compare));
}
private void doTestDgaps(int size, int count1, int count2) throws IOException {
MockDirectoryWrapper d = new MockDirectoryWrapper(random, new RAMDirectory());
MockDirectoryWrapper d = new MockDirectoryWrapper(random(), new RAMDirectory());
d.setPreventDoubleWrite(false);
BitVector bv = new BitVector(size);
bv.invertAll();
@ -198,24 +198,24 @@ public class TestBitVector extends LuceneTestCase
bv.clear(i);
assertEquals(i+1,size-bv.count());
}
bv.write(d, "TESTBV", newIOContext(random));
bv.write(d, "TESTBV", newIOContext(random()));
// gradually increase number of set bits
for (int i=count1; i<count2; i++) {
BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random));
BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random()));
assertTrue(doCompare(bv,bv2));
bv = bv2;
bv.clear(i);
assertEquals(i+1, size-bv.count());
bv.write(d, "TESTBV", newIOContext(random));
bv.write(d, "TESTBV", newIOContext(random()));
}
// now start decreasing number of set bits
for (int i=count2-1; i>=count1; i--) {
BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random));
BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random()));
assertTrue(doCompare(bv,bv2));
bv = bv2;
bv.set(i);
assertEquals(i,size-bv.count());
bv.write(d, "TESTBV", newIOContext(random));
bv.write(d, "TESTBV", newIOContext(random()));
}
}
@ -224,11 +224,11 @@ public class TestBitVector extends LuceneTestCase
final int numBits = 10240;
BitVector bv = new BitVector(numBits);
bv.invertAll();
int numToClear = random.nextInt(5);
int numToClear = random().nextInt(5);
for(int i=0;i<numToClear;i++) {
bv.clear(random.nextInt(numBits));
bv.clear(random().nextInt(numBits));
}
bv.write(d, "test", newIOContext(random));
bv.write(d, "test", newIOContext(random()));
final long size = d.fileLength("test");
assertTrue("size=" + size, size < 100);
d.close();
@ -236,24 +236,24 @@ public class TestBitVector extends LuceneTestCase
public void testClearedBitNearEnd() throws IOException {
Directory d = newDirectory();
final int numBits = _TestUtil.nextInt(random, 7, 1000);
final int numBits = _TestUtil.nextInt(random(), 7, 1000);
BitVector bv = new BitVector(numBits);
bv.invertAll();
bv.clear(numBits-_TestUtil.nextInt(random, 1, 7));
bv.write(d, "test", newIOContext(random));
bv.clear(numBits-_TestUtil.nextInt(random(), 1, 7));
bv.write(d, "test", newIOContext(random()));
assertEquals(numBits-1, bv.count());
d.close();
}
public void testMostlySet() throws IOException {
Directory d = newDirectory();
final int numBits = _TestUtil.nextInt(random, 30, 1000);
final int numBits = _TestUtil.nextInt(random(), 30, 1000);
for(int numClear=0;numClear<20;numClear++) {
BitVector bv = new BitVector(numBits);
bv.invertAll();
int count = 0;
while(count < numClear) {
final int bit = random.nextInt(numBits);
final int bit = random().nextInt(numBits);
// Don't use getAndClear, so that count is recomputed
if (bv.get(bit)) {
bv.clear(bit);


@ -43,10 +43,10 @@ public class TestReuseDocsEnum extends LuceneTestCase {
public void testReuseDocsEnumNoReuse() throws IOException {
Directory dir = newDirectory();
Codec cp = _TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat());
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random);
createRandomIndex(numdocs, writer, random());
writer.commit();
DirectoryReader open = DirectoryReader.open(dir);
@ -58,7 +58,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
MatchNoBits bits = new Bits.MatchNoBits(r.maxDoc());
while ((iterator.next()) != null) {
DocsEnum docs = iterator.docs(random.nextBoolean() ? bits : new Bits.MatchNoBits(r.maxDoc()), null, random.nextBoolean());
DocsEnum docs = iterator.docs(random().nextBoolean() ? bits : new Bits.MatchNoBits(r.maxDoc()), null, random().nextBoolean());
enums.put(docs, true);
}
@ -72,10 +72,10 @@ public class TestReuseDocsEnum extends LuceneTestCase {
public void testReuseDocsEnumSameBitsOrNull() throws IOException {
Directory dir = newDirectory();
Codec cp = _TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat());
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random);
createRandomIndex(numdocs, writer, random());
writer.commit();
DirectoryReader open = DirectoryReader.open(dir);
@ -87,7 +87,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
MatchNoBits bits = new Bits.MatchNoBits(open.maxDoc());
DocsEnum docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(bits, docs, random.nextBoolean());
docs = iterator.docs(bits, docs, random().nextBoolean());
enums.put(docs, true);
}
@ -96,7 +96,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
iterator = terms.iterator(null);
docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(new Bits.MatchNoBits(open.maxDoc()), docs, random.nextBoolean());
docs = iterator.docs(new Bits.MatchNoBits(open.maxDoc()), docs, random().nextBoolean());
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -105,7 +105,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
iterator = terms.iterator(null);
docs = null;
while ((iterator.next()) != null) {
docs = iterator.docs(null, docs, random.nextBoolean());
docs = iterator.docs(null, docs, random().nextBoolean());
enums.put(docs, true);
}
assertEquals(1, enums.size());
@ -117,10 +117,10 @@ public class TestReuseDocsEnum extends LuceneTestCase {
public void testReuseDocsEnumDifferentReader() throws IOException {
Directory dir = newDirectory();
Codec cp = _TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat());
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random);
createRandomIndex(numdocs, writer, random());
writer.commit();
DirectoryReader firstReader = DirectoryReader.open(dir);
@ -137,7 +137,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
DocsEnum docs = null;
BytesRef term = null;
while ((term = iterator.next()) != null) {
docs = iterator.docs(null, randomDocsEnum("body", term, sequentialSubReaders2, bits), random.nextBoolean());
docs = iterator.docs(null, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean());
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -146,7 +146,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
enums.clear();
docs = null;
while ((term = iterator.next()) != null) {
docs = iterator.docs(bits, randomDocsEnum("body", term, sequentialSubReaders2, bits), random.nextBoolean());
docs = iterator.docs(bits, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean());
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@ -155,11 +155,11 @@ public class TestReuseDocsEnum extends LuceneTestCase {
}
public DocsEnum randomDocsEnum(String field, BytesRef term, IndexReader[] readers, Bits bits) throws IOException {
if (random.nextInt(10) == 0) {
if (random().nextInt(10) == 0) {
return null;
}
AtomicReader indexReader = (AtomicReader) readers[random.nextInt(readers.length)];
return indexReader.termDocsEnum(bits, field, term, random.nextBoolean());
AtomicReader indexReader = (AtomicReader) readers[random().nextInt(readers.length)];
return indexReader.termDocsEnum(bits, field, term, random().nextBoolean());
}
/**


@ -20,6 +20,7 @@ package org.apache.lucene.codecs.lucene40.values;
import java.io.IOException;
import java.io.Reader;
import java.util.Comparator;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
@ -69,18 +70,18 @@ public class TestDocValues extends LuceneTestCase {
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Bytes.getWriter(dir, "test", mode, fixedSize, COMP, trackBytes, newIOContext(random),
random.nextBoolean());
DocValuesConsumer w = Bytes.getWriter(dir, "test", mode, fixedSize, COMP, trackBytes, newIOContext(random()),
random().nextBoolean());
int maxDoc = 220;
final String[] values = new String[maxDoc];
final int fixedLength = 1 + atLeast(50);
for (int i = 0; i < 100; i++) {
final String s;
if (i > 0 && random.nextInt(5) <= 2) {
if (i > 0 && random().nextInt(5) <= 2) {
// use prior value
s = values[2 * random.nextInt(i)];
s = values[2 * random().nextInt(i)];
} else {
s = _TestUtil.randomFixedByteLengthUnicodeString(random, fixedSize? fixedLength : 1 + random.nextInt(39));
s = _TestUtil.randomFixedByteLengthUnicodeString(random(), fixedSize? fixedLength : 1 + random().nextInt(39));
}
values[2 * i] = s;
@ -91,7 +92,7 @@ public class TestDocValues extends LuceneTestCase {
w.finish(maxDoc);
assertEquals(0, trackBytes.get());
DocValues r = Bytes.getValues(dir, "test", mode, fixedSize, maxDoc, COMP, newIOContext(random));
DocValues r = Bytes.getValues(dir, "test", mode, fixedSize, maxDoc, COMP, newIOContext(random()));
// Verify we can load source twice:
for (int iter = 0; iter < 2; iter++) {
@ -123,6 +124,7 @@ public class TestDocValues extends LuceneTestCase {
// Lookup random strings:
if (mode == Bytes.Mode.SORTED) {
final int valueCount = ss.getValueCount();
Random random = random();
for (int i = 0; i < 1000; i++) {
BytesRef bytesValue = new BytesRef(_TestUtil.randomFixedByteLengthUnicodeString(random, fixedSize? fixedLength : 1 + random.nextInt(39)));
int ord = ss.getOrdByValue(bytesValue, new BytesRef());
@ -178,14 +180,14 @@ public class TestDocValues extends LuceneTestCase {
for (int i = 0; i < minMax.length; i++) {
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.VAR_INTS, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.VAR_INTS, newIOContext(random()));
valueHolder.numberValue = minMax[i][0];
w.add(0, valueHolder);
valueHolder.numberValue = minMax[i][1];
w.add(1, valueHolder);
w.finish(2);
assertEquals(0, trackBytes.get());
DocValues r = Ints.getValues(dir, "test", 2, Type.VAR_INTS, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", 2, Type.VAR_INTS, newIOContext(random()));
Source source = getSource(r);
assertEquals(i + " with min: " + minMax[i][0] + " max: " + minMax[i][1],
expectedTypes[i], source.getType());
@ -214,13 +216,13 @@ public class TestDocValues extends LuceneTestCase {
byte[] sourceArray = new byte[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_8, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_8, newIOContext(random()));
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = (long) sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_8, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_8, newIOContext(random()));
Source source = r.getSource();
assertTrue(source.hasArray());
byte[] loaded = ((byte[])source.getArray());
@ -237,13 +239,13 @@ public class TestDocValues extends LuceneTestCase {
short[] sourceArray = new short[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_16, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_16, newIOContext(random()));
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = (long) sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_16, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_16, newIOContext(random()));
Source source = r.getSource();
assertTrue(source.hasArray());
short[] loaded = ((short[])source.getArray());
@ -260,13 +262,13 @@ public class TestDocValues extends LuceneTestCase {
long[] sourceArray = new long[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_64, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_64, newIOContext(random()));
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_64, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_64, newIOContext(random()));
Source source = r.getSource();
assertTrue(source.hasArray());
long[] loaded = ((long[])source.getArray());
@ -283,13 +285,13 @@ public class TestDocValues extends LuceneTestCase {
int[] sourceArray = new int[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_32, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, Type.FIXED_INTS_32, newIOContext(random()));
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = (long) sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_32, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", sourceArray.length, Type.FIXED_INTS_32, newIOContext(random()));
Source source = r.getSource();
assertTrue(source.hasArray());
int[] loaded = ((int[])source.getArray());
@ -306,13 +308,13 @@ public class TestDocValues extends LuceneTestCase {
float[] sourceArray = new float[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random), Type.FLOAT_32);
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random()), Type.FLOAT_32);
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Floats.getValues(dir, "test", 3, newIOContext(random), Type.FLOAT_32);
DocValues r = Floats.getValues(dir, "test", 3, newIOContext(random()), Type.FLOAT_32);
Source source = r.getSource();
assertTrue(source.hasArray());
float[] loaded = ((float[])source.getArray());
@ -329,13 +331,13 @@ public class TestDocValues extends LuceneTestCase {
double[] sourceArray = new double[] {1,2,3};
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random), Type.FLOAT_64);
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random()), Type.FLOAT_64);
for (int i = 0; i < sourceArray.length; i++) {
valueHolder.numberValue = sourceArray[i];
w.add(i, valueHolder);
}
w.finish(sourceArray.length);
DocValues r = Floats.getValues(dir, "test", 3, newIOContext(random), Type.FLOAT_64);
DocValues r = Floats.getValues(dir, "test", 3, newIOContext(random()), Type.FLOAT_64);
Source source = r.getSource();
assertTrue(source.hasArray());
double[] loaded = ((double[])source.getArray());
@ -350,22 +352,22 @@ public class TestDocValues extends LuceneTestCase {
private void testInts(Type type, int maxBit) throws IOException {
DocValueHolder valueHolder = new DocValueHolder();
long maxV = 1;
final int NUM_VALUES = 333 + random.nextInt(333);
final int NUM_VALUES = 333 + random().nextInt(333);
final long[] values = new long[NUM_VALUES];
for (int rx = 1; rx < maxBit; rx++, maxV *= 2) {
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, type, newIOContext(random));
DocValuesConsumer w = Ints.getWriter(dir, "test", trackBytes, type, newIOContext(random()));
for (int i = 0; i < NUM_VALUES; i++) {
final long v = random.nextLong() % (1 + maxV);
final long v = random().nextLong() % (1 + maxV);
valueHolder.numberValue = values[i] = v;
w.add(i, valueHolder);
}
final int additionalDocs = 1 + random.nextInt(9);
final int additionalDocs = 1 + random().nextInt(9);
w.finish(NUM_VALUES + additionalDocs);
assertEquals(0, trackBytes.get());
DocValues r = Ints.getValues(dir, "test", NUM_VALUES + additionalDocs, type, newIOContext(random));
DocValues r = Ints.getValues(dir, "test", NUM_VALUES + additionalDocs, type, newIOContext(random()));
for (int iter = 0; iter < 2; iter++) {
Source s = getSource(r);
assertEquals(type, s.getType());
@ -388,20 +390,20 @@ public class TestDocValues extends LuceneTestCase {
DocValueHolder valueHolder = new DocValueHolder();
Directory dir = newDirectory();
final Counter trackBytes = Counter.newCounter();
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random), type);
final int NUM_VALUES = 777 + random.nextInt(777);
DocValuesConsumer w = Floats.getWriter(dir, "test", trackBytes, newIOContext(random()), type);
final int NUM_VALUES = 777 + random().nextInt(777);
final double[] values = new double[NUM_VALUES];
for (int i = 0; i < NUM_VALUES; i++) {
final double v = type == Type.FLOAT_32 ? random.nextFloat() : random
final double v = type == Type.FLOAT_32 ? random().nextFloat() : random()
.nextDouble();
valueHolder.numberValue = values[i] = v;
w.add(i, valueHolder);
}
final int additionalValues = 1 + random.nextInt(10);
final int additionalValues = 1 + random().nextInt(10);
w.finish(NUM_VALUES + additionalValues);
assertEquals(0, trackBytes.get());
DocValues r = Floats.getValues(dir, "test", NUM_VALUES + additionalValues, newIOContext(random), type);
DocValues r = Floats.getValues(dir, "test", NUM_VALUES + additionalValues, newIOContext(random()), type);
for (int iter = 0; iter < 2; iter++) {
Source s = getSource(r);
for (int i = 0; i < NUM_VALUES; i++) {
@ -419,7 +421,7 @@ public class TestDocValues extends LuceneTestCase {
private Source getSource(DocValues values) throws IOException {
// getSource uses cache internally
switch(random.nextInt(5)) {
switch(random().nextInt(5)) {
case 3:
return values.load();
case 2:


@ -96,7 +96,7 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
public void testMergeUnusedPerFieldCodec() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
IndexWriter writer = newWriter(dir, iwconf);
addDocs(writer, 10);
writer.commit();
@ -123,7 +123,7 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
System.out.println("TEST: make new index");
}
IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
//((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
IndexWriter writer = newWriter(dir, iwconf);
@ -142,7 +142,7 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
assertQuery(new Term("content", "aaa"), dir, 10);
Lucene40Codec codec = (Lucene40Codec)iwconf.getCodec();
iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setCodec(codec);
//((LogMergePolicy) iwconf.getMergePolicy()).setUseCompoundFile(false);
//((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
@ -234,28 +234,28 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
*/
@Test
public void testStressPerFieldCodec() throws IOException {
Directory dir = newDirectory(random);
Directory dir = newDirectory(random());
final int docsPerRound = 97;
int numRounds = atLeast(1);
for (int i = 0; i < numRounds; i++) {
int num = _TestUtil.nextInt(random, 30, 60);
IndexWriterConfig config = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random));
int num = _TestUtil.nextInt(random(), 30, 60);
IndexWriterConfig config = newIndexWriterConfig(random(),
TEST_VERSION_CURRENT, new MockAnalyzer(random()));
config.setOpenMode(OpenMode.CREATE_OR_APPEND);
IndexWriter writer = newWriter(dir, config);
for (int j = 0; j < docsPerRound; j++) {
final Document doc = new Document();
for (int k = 0; k < num; k++) {
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setTokenized(random.nextBoolean());
customType.setOmitNorms(random.nextBoolean());
customType.setTokenized(random().nextBoolean());
customType.setOmitNorms(random().nextBoolean());
Field field = newField("" + k, _TestUtil
.randomRealisticUnicodeString(random, 128), customType);
.randomRealisticUnicodeString(random(), 128), customType);
doc.add(field);
}
writer.addDocument(doc);
}
if (random.nextBoolean()) {
if (random().nextBoolean()) {
writer.forceMerge(1);
}
writer.commit();


@ -56,13 +56,13 @@ public class Test10KPulsings extends LuceneTestCase {
File f = _TestUtil.getTempDir("10kpulsed");
MockDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
Document document = new Document();
FieldType ft = new FieldType(TextField.TYPE_STORED);
switch(_TestUtil.nextInt(random, 0, 2)) {
switch(_TestUtil.nextInt(random(), 0, 2)) {
case 0: ft.setIndexOptions(IndexOptions.DOCS_ONLY); break;
case 1: ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS); break;
default: ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); break;
@ -87,7 +87,7 @@ public class Test10KPulsings extends LuceneTestCase {
for (int i = 0; i < 10050; i++) {
String expected = df.format(i);
assertEquals(expected, te.next().utf8ToString());
de = _TestUtil.docs(random, te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, false);
assertTrue(de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(DocIdSetIterator.NO_MORE_DOCS, de.nextDoc());
}
@ -101,19 +101,19 @@ public class Test10KPulsings extends LuceneTestCase {
*/
public void test10kNotPulsed() throws Exception {
// we always run this test with pulsing codec.
int freqCutoff = _TestUtil.nextInt(random, 1, 10);
int freqCutoff = _TestUtil.nextInt(random(), 1, 10);
Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(freqCutoff));
File f = _TestUtil.getTempDir("10knotpulsed");
MockDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
Document document = new Document();
FieldType ft = new FieldType(TextField.TYPE_STORED);
switch(_TestUtil.nextInt(random, 0, 2)) {
switch(_TestUtil.nextInt(random(), 0, 2)) {
case 0: ft.setIndexOptions(IndexOptions.DOCS_ONLY); break;
case 1: ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS); break;
default: ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); break;
@ -145,7 +145,7 @@ public class Test10KPulsings extends LuceneTestCase {
for (int i = 0; i < 10050; i++) {
String expected = df.format(i);
assertEquals(expected, te.next().utf8ToString());
de = _TestUtil.docs(random, te, null, de, false);
de = _TestUtil.docs(random(), te, null, de, false);
assertTrue(de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertEquals(DocIdSetIterator.NO_MORE_DOCS, de.nextDoc());
}


@ -48,8 +48,8 @@ public class TestPulsingReuse extends LuceneTestCase {
// we always run this test with pulsing codec.
Codec cp = _TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1));
Directory dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
Document doc = new Document();
doc.add(new Field("foo", "a b b c c c d e f g g h i i j j k", TextField.TYPE_UNSTORED));
iw.addDocument(doc);
@ -87,8 +87,8 @@ public class TestPulsingReuse extends LuceneTestCase {
Codec cp = _TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat());
MockDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // will do this ourselves, custom codec
RandomIndexWriter iw = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(cp));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
Document doc = new Document();
doc.add(new Field("foo", "a b b c c c d e f g g g h i i j j k l l m m m", TextField.TYPE_UNSTORED));
// note: the reuse is imperfect, here we would have 4 enums (lost reuse when we get an enum for 'm')


@ -51,7 +51,7 @@ public class TestBinaryDocument extends LuceneTestCase {
/** add the doc to a ram index */
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
writer.addDocument(doc);
/** open a reader and fetch the document */
@ -85,7 +85,7 @@ public class TestBinaryDocument extends LuceneTestCase {
/** add the doc to a ram index */
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
writer.addDocument(doc);
/** open a reader and fetch the document */


@ -171,7 +171,7 @@ public class TestDocument extends LuceneTestCase {
*/
public void testGetValuesForIndexedDocument() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
writer.addDocument(makeDocumentWithFields());
IndexReader reader = writer.getReader();
@ -256,7 +256,7 @@ public class TestDocument extends LuceneTestCase {
doc.add(new Field("keyword", "test", StringField.TYPE_STORED));
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
writer.addDocument(doc);
field.setStringValue("id2");
writer.addDocument(doc);
@ -299,7 +299,7 @@ public class TestDocument extends LuceneTestCase {
// LUCENE-3682
public void testTransitionAPI() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, dir);
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(new Field("stored", "abc", Field.Store.YES, Field.Index.NO));
@ -363,7 +363,7 @@ public class TestDocument extends LuceneTestCase {
public void testBoost() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iw = new IndexWriter(dir, iwc);
Document doc = new Document();


@ -46,7 +46,7 @@ public class Test2BPostings extends LuceneTestCase {
dir.setCheckIndexOnClose(false); // don't double-checkindex
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())


@ -149,7 +149,7 @@ public class Test2BTerms extends LuceneTestCase {
System.out.println("Starting Test2B");
final long TERM_COUNT = ((long) Integer.MAX_VALUE) + 100000000;
final int TERMS_PER_DOC = _TestUtil.nextInt(random, 100000, 1000000);
final int TERMS_PER_DOC = _TestUtil.nextInt(random(), 100000, 1000000);
List<BytesRef> savedTerms = null;
@ -161,7 +161,7 @@ public class Test2BTerms extends LuceneTestCase {
if (true) {
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -175,7 +175,7 @@ public class Test2BTerms extends LuceneTestCase {
}
Document doc = new Document();
final MyTokenStream ts = new MyTokenStream(random, TERMS_PER_DOC);
final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC);
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setIndexOptions(IndexOptions.DOCS_ONLY);
@ -227,13 +227,13 @@ public class Test2BTerms extends LuceneTestCase {
System.out.println("TEST: findTerms");
final TermsEnum termsEnum = MultiFields.getTerms(r, "field").iterator(null);
final List<BytesRef> savedTerms = new ArrayList<BytesRef>();
int nextSave = _TestUtil.nextInt(random, 500000, 1000000);
int nextSave = _TestUtil.nextInt(random(), 500000, 1000000);
BytesRef term;
while((term = termsEnum.next()) != null) {
if (--nextSave == 0) {
savedTerms.add(BytesRef.deepCopyOf(term));
System.out.println("TEST: add " + term);
nextSave = _TestUtil.nextInt(random, 500000, 1000000);
nextSave = _TestUtil.nextInt(random(), 500000, 1000000);
}
}
return savedTerms;
@ -246,7 +246,7 @@ public class Test2BTerms extends LuceneTestCase {
TermsEnum termsEnum = MultiFields.getTerms(r, "field").iterator(null);
boolean failed = false;
for(int iter=0;iter<10*terms.size();iter++) {
final BytesRef term = terms.get(random.nextInt(terms.size()));
final BytesRef term = terms.get(random().nextInt(terms.size()));
System.out.println("TEST: search " + term);
final long t0 = System.currentTimeMillis();
final int count = s.search(new TermQuery(new Term("field", term)), 1).totalHits;


@ -70,7 +70,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random))
new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
// add 100 documents
addDocs(writer, 100);
@ -80,7 +80,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMergePolicy(newLogMergePolicy(false))
);
@ -89,14 +89,14 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(40, writer.maxDoc());
writer.close();
writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexes(aux, aux2);
assertEquals(190, writer.maxDoc());
@ -111,14 +111,14 @@ public class TestAddIndexes extends LuceneTestCase {
// now add another set in.
Directory aux3 = newDirectory();
writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexes(aux3);
assertEquals(230, writer.maxDoc());
@ -132,7 +132,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "bbb"), 50);
// now fully merge it.
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.close();
@ -145,11 +145,11 @@ public class TestAddIndexes extends LuceneTestCase {
// now add a single document
Directory aux4 = newDirectory();
writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDocs2(writer, 1);
writer.close();
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexes(aux4);
assertEquals(231, writer.maxDoc());
@ -172,7 +172,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.addIndexes(aux);
// Adds 10 docs, then replaces them with another 10
@ -208,7 +208,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -246,7 +246,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -286,7 +286,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
@ -294,7 +294,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false))
@ -304,7 +304,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.close();
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false))
@ -312,7 +312,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDocs(writer, 100);
writer.close();
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
try {
// cannot add self
writer.addIndexes(aux, dir);
@ -342,7 +342,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(4))
@ -371,7 +371,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(9).
setMergePolicy(newLogMergePolicy(4))
@ -400,13 +400,13 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(4))
);
writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux, newIOContext(random))));
writer.addIndexes(aux, new MockDirectoryWrapper(random(), new RAMDirectory(aux, newIOContext(random()))));
assertEquals(1060, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@ -426,7 +426,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux, true);
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 20; i++) {
@ -439,7 +439,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(4).
setMergePolicy(newLogMergePolicy(4))
@ -448,7 +448,7 @@ public class TestAddIndexes extends LuceneTestCase {
if (VERBOSE) {
System.out.println("\nTEST: now addIndexes");
}
writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux, newIOContext(random))));
writer.addIndexes(aux, new MockDirectoryWrapper(random(), new RAMDirectory(aux, newIOContext(random()))));
assertEquals(1020, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@ -468,7 +468,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
aux2,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(100).
setMergePolicy(newLogMergePolicy(10))
@ -478,7 +478,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(3, writer.getSegmentCount());
writer.close();
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 27; i++) {
@ -489,7 +489,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(3, reader.numDocs());
reader.close();
dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux2, dontMergeConfig);
for (int i = 0; i < 8; i++) {
@ -502,7 +502,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(6).
setMergePolicy(newLogMergePolicy(4))
@ -550,7 +550,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void verifyTermDocs(Directory dir, Term term, int numDocs)
throws IOException {
IndexReader reader = IndexReader.open(dir);
DocsEnum docsEnum = _TestUtil.docs(random, reader, term.field, term.bytes, null, null, false);
DocsEnum docsEnum = _TestUtil.docs(random(), reader, term.field, term.bytes, null, null, false);
int count = 0;
while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS)
count++;
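verifyTermDocs counts postings by advancing the enum until it returns the NO_MORE_DOCS sentinel (Integer.MAX_VALUE in DocIdSetIterator), so no separate hasNext check is needed. A self-contained sketch of the idiom, written against a hypothetical stand-in interface:

final class DocCounting {
  // Hypothetical stand-in for DocIdSetIterator, just enough for the loop.
  interface DocIterator {
    int NO_MORE_DOCS = Integer.MAX_VALUE; // sentinel, as in the real class
    int nextDoc(); // next docID in increasing order, or NO_MORE_DOCS at the end
  }

  static int countDocs(DocIterator it) {
    int count = 0;
    while (it.nextDoc() != DocIterator.NO_MORE_DOCS) {
      count++;
    }
    return count;
  }
}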
@ -565,7 +565,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void setUpDirs(Directory dir, Directory aux, boolean withID) throws IOException {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
if (withID) {
addDocsWithID(writer, 1000, 0);
@ -578,7 +578,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false, 10))
@ -593,7 +593,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.close();
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false, 10))
@ -612,7 +612,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setUseCompoundFile(false);
lmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(5).setMergePolicy(lmp));
Document doc = new Document();
@ -641,7 +641,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setUseCompoundFile(false);
lmp.setMergeFactor(4);
writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random))
new MockAnalyzer(random()))
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
writer.addIndexes(dir);
writer.close();
@ -672,16 +672,16 @@ public class TestAddIndexes extends LuceneTestCase {
public RunAddIndexesThreads(int numCopy) throws Throwable {
NUM_COPY = numCopy;
dir = new MockDirectoryWrapper(random, new RAMDirectory());
dir = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++)
addDoc(writer);
writer.close();
dir2 = newDirectory();
writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer2.commit();
@ -700,7 +700,7 @@ public class TestAddIndexes extends LuceneTestCase {
final Directory[] dirs = new Directory[NUM_COPY];
for(int k=0;k<NUM_COPY;k++)
dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(dir, newIOContext(random)));
dirs[k] = new MockDirectoryWrapper(random(), new RAMDirectory(dir, newIOContext(random())));
int j=0;
@ -925,7 +925,7 @@ public class TestAddIndexes extends LuceneTestCase {
CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
c.launchThreads(-1);
Thread.sleep(_TestUtil.nextInt(random, 10, 500));
Thread.sleep(_TestUtil.nextInt(random(), 10, 500));
// Close w/o first stopping/joining the threads
if (VERBOSE) {
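The jitter above comes from _TestUtil.nextInt(random(), 10, 500), a uniform draw from a bounded range. A plausible equivalent, assuming both bounds are inclusive (which is how these sleeps read):

import java.util.Random;

final class RangeUtil {
  // Plausible equivalent of _TestUtil.nextInt(Random, start, end), assuming
  // inclusive bounds at both ends.
  static int nextInt(Random r, int start, int end) {
    return start + r.nextInt(end - start + 1);
  }
}

// usage, mirroring the test: Thread.sleep(RangeUtil.nextInt(random, 10, 500));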
@ -950,7 +950,7 @@ public class TestAddIndexes extends LuceneTestCase {
CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
c.launchThreads(-1);
Thread.sleep(_TestUtil.nextInt(random, 10, 500));
Thread.sleep(_TestUtil.nextInt(random(), 10, 500));
// Close w/o first stopping/joining the threads
if (VERBOSE) {
@ -971,7 +971,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dirs[i], conf);
Document doc = new Document();
doc.add(new StringField("id", "myid"));
@ -979,7 +979,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.close();
}
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dirs[0], conf);
// Now delete the document
@ -1020,7 +1020,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodec(codec));
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(codec));
// add 100 documents
addDocsWithID(writer, 100, 0);
assertEquals(100, writer.maxDoc());
@ -1030,7 +1030,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setCodec(codec).
setMaxBufferedDocs(10).
@ -1044,7 +1044,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux2,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setCodec(codec)
);
@ -1057,7 +1057,7 @@ public class TestAddIndexes extends LuceneTestCase {
// test doc count before segments are merged
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setCodec(codec)
);
@ -1094,7 +1094,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = new RAMDirectory();
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document d = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
@ -1105,8 +1105,8 @@ public class TestAddIndexes extends LuceneTestCase {
IndexReader[] readers = new IndexReader[] { IndexReader.open(dirs[0]), IndexReader.open(dirs[1]) };
Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setUseCompoundFile(true);
lmp.setNoCFSRatio(1.0); // Force creation of CFS
@ -1175,7 +1175,7 @@ public class TestAddIndexes extends LuceneTestCase {
toAdd.setCheckIndexOnClose(false);
{
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
conf.setCodec(new UnRegisteredCodec());
IndexWriter w = new IndexWriter(toAdd, conf);
Document doc = new Document();
@ -1189,8 +1189,8 @@ public class TestAddIndexes extends LuceneTestCase {
{
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
conf.setCodec(_TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1 + random.nextInt(20))));
new MockAnalyzer(random()));
conf.setCodec(_TestUtil.alwaysPostingsFormat(new Pulsing40PostingsFormat(1 + random().nextInt(20))));
IndexWriter w = new IndexWriter(dir, conf);
try {
w.addIndexes(toAdd);
@ -1217,7 +1217,7 @@ public class TestAddIndexes extends LuceneTestCase {
// LUCENE-3575
public void testFieldNamesChanged() throws IOException {
Directory d1 = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, d1);
RandomIndexWriter w = new RandomIndexWriter(random(), d1);
Document doc = new Document();
doc.add(newField("f1", "doc1 field1", StringField.TYPE_STORED));
doc.add(newField("id", "1", StringField.TYPE_STORED));
@ -1226,7 +1226,7 @@ public class TestAddIndexes extends LuceneTestCase {
w.close();
Directory d2 = newDirectory();
w = new RandomIndexWriter(random, d2);
w = new RandomIndexWriter(random(), d2);
doc = new Document();
doc.add(newField("f2", "doc2 field2", StringField.TYPE_STORED));
doc.add(newField("id", "2", StringField.TYPE_STORED));
@ -1235,7 +1235,7 @@ public class TestAddIndexes extends LuceneTestCase {
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random, d3);
w = new RandomIndexWriter(random(), d3);
w.addIndexes(r1, r2);
r1.close();
d1.close();
@ -1260,7 +1260,7 @@ public class TestAddIndexes extends LuceneTestCase {
public void testDocValues() throws IOException {
assumeFalse("preflex does not support docvalues", Codec.getDefault().getName().equals("Lucene3x"));
Directory d1 = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, d1);
RandomIndexWriter w = new RandomIndexWriter(random(), d1);
Document doc = new Document();
doc.add(newField("id", "1", StringField.TYPE_STORED));
doc.add(new DocValuesField("dv", 1, DocValues.Type.VAR_INTS));
@ -1269,7 +1269,7 @@ public class TestAddIndexes extends LuceneTestCase {
w.close();
Directory d2 = newDirectory();
w = new RandomIndexWriter(random, d2);
w = new RandomIndexWriter(random(), d2);
doc = new Document();
doc.add(newField("id", "2", StringField.TYPE_STORED));
doc.add(new DocValuesField("dv", 2, DocValues.Type.VAR_INTS));
@ -1278,7 +1278,7 @@ public class TestAddIndexes extends LuceneTestCase {
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random, d3);
w = new RandomIndexWriter(random(), d3);
w.addIndexes(SlowCompositeReaderWrapper.wrap(r1), SlowCompositeReaderWrapper.wrap(r2));
r1.close();
d1.close();


@ -16,29 +16,23 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import org.apache.lucene.util.*;
import org.apache.lucene.store.*;
import org.apache.lucene.document.*;
import org.apache.lucene.analysis.MockAnalyzer;
import java.util.Random;
import java.io.File;
import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.store.*;
import org.apache.lucene.util.*;
public class TestAtomicUpdate extends LuceneTestCase {
private static final class MockIndexWriter extends IndexWriter {
static Random RANDOM;
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
@Override
boolean testPoint(String name) {
// if (name.equals("startCommit")) {
if (RANDOM.nextInt(4) == 2)
if (LuceneTestCase.random().nextInt(4) == 2)
Thread.yield();
return true;
}
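The subclass used to yield based on a hand-assigned static RANDOM field (its declaration above, and the assignment removed further down, are the other halves of this change); testPoint now pulls from LuceneTestCase.random() directly. A standalone sketch of the test-point idiom itself, which injects scheduling jitter so thread interleavings vary between runs:

import java.util.Random;

// Standalone sketch of the testPoint idiom: at a named instrumentation point,
// occasionally yield the CPU to vary thread interleavings. The Random comes
// from a per-test source, never a hand-assigned static field.
class YieldingTestPoint {
  private final Random random;

  YieldingTestPoint(Random random) {
    this.random = random;
  }

  boolean testPoint(String name) {
    if (random.nextInt(4) == 2) { // yield roughly 25% of the time
      Thread.yield();
    }
    return true; // test points always report success; they only add jitter
  }
}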
@ -127,7 +121,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
TimedThread[] threads = new TimedThread[4];
IndexWriterConfig conf = new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(7);
((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
IndexWriter writer = new MockIndexWriter(directory, conf);
@ -185,11 +179,10 @@ public class TestAtomicUpdate extends LuceneTestCase {
FSDirectory.
*/
public void testAtomicUpdates() throws Exception {
MockIndexWriter.RANDOM = random;
Directory directory;
// First in a RAM directory:
directory = new MockDirectoryWrapper(random, new RAMDirectory());
directory = new MockDirectoryWrapper(random(), new RAMDirectory());
runTest(directory);
directory.close();


@ -176,7 +176,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
try {
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
} catch (IndexFormatTooOldException e) {
// pass
@ -217,7 +217,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
Directory dir = newDirectory(oldIndexDirs.get(name));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.forceMerge(1);
w.close();
@ -229,7 +229,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
for (String name : oldNames) {
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.addIndexes(oldIndexDirs.get(name));
w.close();
@ -243,7 +243,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.addIndexes(reader);
w.close();
reader.close();
@ -261,7 +261,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
public void testIndexOldIndexNoAdds() throws IOException {
for (String name : oldNames) {
Directory dir = newDirectory(oldIndexDirs.get(name));
changeIndexNoAdds(random, dir);
changeIndexNoAdds(random(), dir);
dir.close();
}
}
@ -272,7 +272,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
System.out.println("TEST: oldName=" + name);
}
Directory dir = newDirectory(oldIndexDirs.get(name));
changeIndexWithAdds(random, dir, name);
changeIndexWithAdds(random(), dir, name);
dir.close();
}
}
@ -426,7 +426,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
mp.setUseCompoundFile(doCFS);
mp.setNoCFSRatio(1.0);
// TODO: remove randomness
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp);
IndexWriter writer = new IndexWriter(dir, conf);
@ -445,7 +445,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
mp.setUseCompoundFile(doCFS);
mp.setNoCFSRatio(1.0);
// TODO: remove randomness
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp);
writer = new IndexWriter(dir, conf);
addNoProxDoc(writer);
@ -480,7 +480,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(-1).
setRAMBufferSizeMB(16.0).
setMergePolicy(mergePolicy)
@ -494,7 +494,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// Delete one doc so we get a .del file:
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
);
Term searchTerm = new Term("id", "7");
@ -597,7 +597,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// should be found exactly
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random, terms, null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, false)));
assertNull(terms.next());
// should hit end of field
@ -609,12 +609,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
assertEquals(TermsEnum.SeekStatus.NOT_FOUND,
terms.seekCeil(new BytesRef("a")));
assertTrue(terms.term().bytesEquals(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random, terms, null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, false)));
assertNull(terms.next());
assertEquals(TermsEnum.SeekStatus.FOUND,
terms.seekCeil(aaaTerm));
assertEquals(35, countDocs(_TestUtil.docs(random, terms, null, null, false)));
assertEquals(35, countDocs(_TestUtil.docs(random(), terms, null, null, false)));
assertNull(terms.next());
r.close();
@ -713,12 +713,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
RAMDirectory ramDir = new RAMDirectory();
for (int i = 0; i < 3; i++) {
// only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
MergePolicy mp = random.nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(mp);
IndexWriter w = new IndexWriter(ramDir, iwc);
// add a few more docs:
for(int j = 0; j < RANDOM_MULTIPLIER * random.nextInt(30); j++) {
for(int j = 0; j < RANDOM_MULTIPLIER * random().nextInt(30); j++) {
addDoc(w, id++);
}
w.close(false);
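The loop bound RANDOM_MULTIPLIER * random().nextInt(30) scales random work by a run-mode multiplier; assuming the multiplier defaults to 1 locally and is raised for nightly builds, the same test stays cheap on a laptop and thorough on a build server. A one-method sketch:

import java.util.Random;

final class Scaled {
  // Sketch of the RANDOM_MULTIPLIER pattern above, assuming the multiplier is
  // 1 for ordinary runs and larger for nightly runs: workloads scale with run
  // mode while staying reproducible from the same seed.
  static int scaledCount(Random r, int multiplier) {
    return multiplier * r.nextInt(30); // as in the doc-adding loop above
  }
}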
@ -726,7 +726,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// add dummy segments (which are all in current
// version) to single segment index
MergePolicy mp = random.nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
.setMergePolicy(mp);
IndexWriter w = new IndexWriter(dir, iwc);


@ -40,7 +40,7 @@ public class TestBinaryTerms extends LuceneTestCase {
Codec.getDefault().getName().equals("Lucene3x"));
Directory dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random, dir);
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
BytesRef bytes = new BytesRef(2);
BinaryTokenStream tokenStream = new BinaryTokenStream(bytes);


@ -42,7 +42,7 @@ public class TestByteSlices extends LuceneTestCase {
int num = atLeast(10000);
for (int iter = 0; iter < num; iter++) {
int stream = random.nextInt(NUM_STREAM);
int stream = random().nextInt(NUM_STREAM);
if (VERBOSE)
System.out.println("write stream=" + stream);
@ -54,12 +54,12 @@ public class TestByteSlices extends LuceneTestCase {
}
writer.init(uptos[stream]);
int numValue = random.nextInt(20);
int numValue = random().nextInt(20);
for(int j=0;j<numValue;j++) {
if (VERBOSE)
System.out.println(" write " + (counters[stream]+j));
// write some large (incl. negative) ints:
writer.writeVInt(random.nextInt());
writer.writeVInt(random().nextInt());
writer.writeVInt(counters[stream]+j);
}
counters[stream] += numValue;


@ -35,7 +35,7 @@ public class TestCheckIndex extends LuceneTestCase {
public void testDeletedDocs() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
for(int i=0;i<19;i++) {
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);


@ -179,7 +179,7 @@ public class TestCodecs extends LuceneTestCase {
final private static String SEGMENT = "0";
TermData[] makeRandomTerms(final boolean omitTF, final boolean storePayloads) {
final int numTerms = 1+random.nextInt(NUM_TERMS_RAND);
final int numTerms = 1+random().nextInt(NUM_TERMS_RAND);
//final int numTerms = 2;
final TermData[] terms = new TermData[numTerms];
@ -190,14 +190,14 @@ public class TestCodecs extends LuceneTestCase {
// Make term text
String text2;
while(true) {
text2 = _TestUtil.randomUnicodeString(random);
text2 = _TestUtil.randomUnicodeString(random());
if (!termsSeen.contains(text2) && !text2.endsWith(".")) {
termsSeen.add(text2);
break;
}
}
final int docFreq = 1+random.nextInt(DOC_FREQ_RAND);
final int docFreq = 1+random().nextInt(DOC_FREQ_RAND);
final int[] docs = new int[docFreq];
PositionData[][] positions;
@ -208,21 +208,21 @@ public class TestCodecs extends LuceneTestCase {
int docID = 0;
for(int j=0;j<docFreq;j++) {
docID += _TestUtil.nextInt(random, 1, 10);
docID += _TestUtil.nextInt(random(), 1, 10);
docs[j] = docID;
if (!omitTF) {
final int termFreq = 1+random.nextInt(TERM_DOC_FREQ_RAND);
final int termFreq = 1+random().nextInt(TERM_DOC_FREQ_RAND);
positions[j] = new PositionData[termFreq];
int position = 0;
for(int k=0;k<termFreq;k++) {
position += _TestUtil.nextInt(random, 1, 10);
position += _TestUtil.nextInt(random(), 1, 10);
final BytesRef payload;
if (storePayloads && random.nextInt(4) == 0) {
final byte[] bytes = new byte[1+random.nextInt(5)];
if (storePayloads && random().nextInt(4) == 0) {
final byte[] bytes = new byte[1+random().nextInt(5)];
for(int l=0;l<bytes.length;l++) {
bytes[l] = (byte) random.nextInt(255);
bytes[l] = (byte) random().nextInt(255);
}
payload = new BytesRef(bytes);
} else {
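The generator above builds postings by accumulating random gaps: each docID (and each position) is the previous value plus a draw from [1, 10], which keeps the sequences strictly increasing, as postings require. A compact sketch of the same pattern:

import java.util.Random;

final class PostingsGen {
  // Sketch of the gap-based generation above: each id is the previous id plus
  // a draw from [1, 10], so the sequence is strictly increasing, as docIDs
  // within a term and positions within a doc must be.
  static int[] randomIncreasingIds(Random r, int count) {
    int[] ids = new int[count];
    int id = 0;
    for (int i = 0; i < count; i++) {
      id += 1 + r.nextInt(10);
      ids[i] = id;
    }
    return ids;
  }
}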
@ -260,7 +260,7 @@ public class TestCodecs extends LuceneTestCase {
Codec codec = Codec.getDefault();
final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, codec, clonedFieldInfos);
final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
final FieldsEnum fieldsEnum = reader.iterator();
assertNotNull(fieldsEnum.next());
@ -279,7 +279,7 @@ public class TestCodecs extends LuceneTestCase {
// make sure it properly fully resets (rewinds) its
// internal state:
for(int iter=0;iter<2;iter++) {
docsEnum = _TestUtil.docs(random, termsEnum, null, docsEnum, false);
docsEnum = _TestUtil.docs(random(), termsEnum, null, docsEnum, false);
assertEquals(terms[i].docs[0], docsEnum.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docsEnum.nextDoc());
}
@ -319,7 +319,7 @@ public class TestCodecs extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: now read postings");
}
final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random()), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
final Verify[] threads = new Verify[NUM_TEST_THREADS-1];
for(int i=0;i<NUM_TEST_THREADS-1;i++) {
@ -342,7 +342,7 @@ public class TestCodecs extends LuceneTestCase {
public void testSepPositionAfterMerge() throws IOException {
final Directory dir = newDirectory();
final IndexWriterConfig config = newIndexWriterConfig(Version.LUCENE_31,
new MockAnalyzer(random));
new MockAnalyzer(random()));
config.setCodec(_TestUtil.alwaysPostingsFormat(new MockSepPostingsFormat()));
final IndexWriter writer = new IndexWriter(dir, config);
@ -439,7 +439,7 @@ public class TestCodecs extends LuceneTestCase {
assertEquals(positions[i].pos, pos);
if (positions[i].payload != null) {
assertTrue(posEnum.hasPayload());
if (LuceneTestCase.random.nextInt(3) < 2) {
if (random().nextInt(3) < 2) {
// Verify the payload bytes
final BytesRef otherPayload = posEnum.getPayload();
assertTrue("expected=" + positions[i].payload.toString() + " got=" + otherPayload.toString(), positions[i].payload.equals(otherPayload));
@ -453,7 +453,7 @@ public class TestCodecs extends LuceneTestCase {
public void _run() throws Throwable {
for(int iter=0;iter<NUM_TEST_ITER;iter++) {
final FieldData field = fields[LuceneTestCase.random.nextInt(fields.length)];
final FieldData field = fields[random().nextInt(fields.length)];
final TermsEnum termsEnum = termsDict.terms(field.fieldInfo.name).iterator(null);
if (si.getCodec() instanceof Lucene3xCodec) {
// code below expects unicode sort order
@ -473,18 +473,18 @@ public class TestCodecs extends LuceneTestCase {
assertEquals(upto, field.terms.length);
// Test random seek:
TermData term = field.terms[LuceneTestCase.random.nextInt(field.terms.length)];
TermData term = field.terms[random().nextInt(field.terms.length)];
TermsEnum.SeekStatus status = termsEnum.seekCeil(new BytesRef(term.text2));
assertEquals(status, TermsEnum.SeekStatus.FOUND);
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random, termsEnum, null, null, false), false);
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, false), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.docsAndPositions(null, null, false), true);
}
// Test random seek by ord:
final int idx = LuceneTestCase.random.nextInt(field.terms.length);
final int idx = random().nextInt(field.terms.length);
term = field.terms[idx];
boolean success = false;
try {
@ -498,7 +498,7 @@ public class TestCodecs extends LuceneTestCase {
assertTrue(termsEnum.term().bytesEquals(new BytesRef(term.text2)));
assertEquals(term.docs.length, termsEnum.docFreq());
if (field.omitTF) {
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random, termsEnum, null, null, false), false);
this.verifyDocs(term.docs, term.positions, _TestUtil.docs(random(), termsEnum, null, null, false), false);
} else {
this.verifyDocs(term.docs, term.positions, termsEnum.docsAndPositions(null, null, false), true);
}
@ -509,7 +509,7 @@ public class TestCodecs extends LuceneTestCase {
System.out.println("TEST: seek non-exist terms");
}
for(int i=0;i<100;i++) {
final String text2 = _TestUtil.randomUnicodeString(random) + ".";
final String text2 = _TestUtil.randomUnicodeString(random()) + ".";
status = termsEnum.seekCeil(new BytesRef(text2));
assertTrue(status == TermsEnum.SeekStatus.NOT_FOUND ||
status == TermsEnum.SeekStatus.END);
@ -547,7 +547,7 @@ public class TestCodecs extends LuceneTestCase {
upto = 0;
do {
term = field.terms[upto];
if (LuceneTestCase.random.nextInt(3) == 1) {
if (random().nextInt(3) == 1) {
final DocsEnum docs;
final DocsEnum docsAndFreqs;
final DocsAndPositionsEnum postings;
@ -556,12 +556,12 @@ public class TestCodecs extends LuceneTestCase {
if (postings != null) {
docs = docsAndFreqs = postings;
} else {
docs = docsAndFreqs = _TestUtil.docs(random, termsEnum, null, null, true);
docs = docsAndFreqs = _TestUtil.docs(random(), termsEnum, null, null, true);
}
} else {
postings = null;
docsAndFreqs = null;
docs = _TestUtil.docs(random, termsEnum, null, null, false);
docs = _TestUtil.docs(random(), termsEnum, null, null, false);
}
assertNotNull(docs);
int upto2 = -1;
@ -569,10 +569,10 @@ public class TestCodecs extends LuceneTestCase {
// Maybe skip:
final int left = term.docs.length-upto2;
int doc;
if (LuceneTestCase.random.nextInt(3) == 1 && left >= 1) {
final int inc = 1+LuceneTestCase.random.nextInt(left-1);
if (random().nextInt(3) == 1 && left >= 1) {
final int inc = 1+random().nextInt(left-1);
upto2 += inc;
if (LuceneTestCase.random.nextInt(2) == 1) {
if (random().nextInt(2) == 1) {
doc = docs.advance(term.docs[upto2]);
assertEquals(term.docs[upto2], doc);
} else {
@ -597,7 +597,7 @@ public class TestCodecs extends LuceneTestCase {
assertEquals(term.docs[upto2], doc);
if (!field.omitTF) {
assertEquals(term.positions[upto2].length, postings.freq());
if (LuceneTestCase.random.nextInt(2) == 1) {
if (random().nextInt(2) == 1) {
this.verifyPositions(term.positions[upto2], postings);
}
}
@ -616,9 +616,9 @@ public class TestCodecs extends LuceneTestCase {
private void write(final FieldInfos fieldInfos, final Directory dir, final FieldData[] fields, boolean allowPreFlex) throws Throwable {
final int termIndexInterval = _TestUtil.nextInt(random, 13, 27);
final int termIndexInterval = _TestUtil.nextInt(random(), 13, 27);
final Codec codec = Codec.getDefault();
final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codec, null, newIOContext(random));
final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codec, null, newIOContext(random()));
final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state);
Arrays.sort(fields);


@ -54,7 +54,7 @@ public class TestCompoundFile extends LuceneTestCase
private void createRandomFile(Directory dir, String name, int size)
throws IOException
{
IndexOutput os = dir.createOutput(name, newIOContext(random));
IndexOutput os = dir.createOutput(name, newIOContext(random()));
for (int i=0; i<size; i++) {
byte b = (byte) (Math.random() * 256);
os.writeByte(b);
@ -72,7 +72,7 @@ public class TestCompoundFile extends LuceneTestCase
int size)
throws IOException
{
IndexOutput os = dir.createOutput(name, newIOContext(random));
IndexOutput os = dir.createOutput(name, newIOContext(random()));
for (int i=0; i < size; i++) {
os.writeByte(start);
start ++;
@ -182,13 +182,13 @@ public class TestCompoundFile extends LuceneTestCase
for (int i=0; i<data.length; i++) {
String name = "t" + data[i];
createSequenceFile(dir, name, (byte) 0, data[i]);
CompoundFileDirectory csw = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random), true);
dir.copy(csw, name, name, newIOContext(random));
CompoundFileDirectory csw = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), true);
dir.copy(csw, name, name, newIOContext(random()));
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random), false);
IndexInput expected = dir.openInput(name, newIOContext(random));
IndexInput actual = csr.openInput(name, newIOContext(random));
CompoundFileDirectory csr = new CompoundFileDirectory(dir, name + ".cfs", newIOContext(random()), false);
IndexInput expected = dir.openInput(name, newIOContext(random()));
IndexInput actual = csr.openInput(name, newIOContext(random()));
assertSameStreams(name, expected, actual);
assertSameSeekBehavior(name, expected, actual);
expected.close();
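The pattern repeated throughout this file is a round trip: write a plain file, copy it into a CompoundFileDirectory opened for write, reopen the compound file read-only, and compare both streams byte for byte. A sketch under the 4.0-era API, with the constructor and copy() signatures taken from the hunks above:

import java.io.IOException;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

final class CfsRoundTrip {
  // Copy one plain file into a compound file, reopen the compound file
  // read-only, and check that the bytes match the original.
  static boolean matches(Directory dir, String file, IOContext ctx)
      throws IOException {
    CompoundFileDirectory csw = new CompoundFileDirectory(dir, "x.cfs", ctx, true);
    dir.copy(csw, file, file, ctx); // plain file -> compound file
    csw.close();

    CompoundFileDirectory csr = new CompoundFileDirectory(dir, "x.cfs", ctx, false);
    IndexInput expected = dir.openInput(file, ctx);
    IndexInput actual = csr.openInput(file, ctx);
    boolean same = expected.length() == actual.length();
    for (long i = 0; same && i < expected.length(); i++) {
      same = expected.readByte() == actual.readByte();
    }
    expected.close();
    actual.close();
    csr.close();
    return same;
  }
}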
@ -205,21 +205,21 @@ public class TestCompoundFile extends LuceneTestCase
createSequenceFile(dir, "d1", (byte) 0, 15);
createSequenceFile(dir, "d2", (byte) 0, 114);
CompoundFileDirectory csw = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random), true);
dir.copy(csw, "d1", "d1", newIOContext(random));
dir.copy(csw, "d2", "d2", newIOContext(random));
CompoundFileDirectory csw = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), true);
dir.copy(csw, "d1", "d1", newIOContext(random()));
dir.copy(csw, "d2", "d2", newIOContext(random()));
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random), false);
IndexInput expected = dir.openInput("d1", newIOContext(random));
IndexInput actual = csr.openInput("d1", newIOContext(random));
CompoundFileDirectory csr = new CompoundFileDirectory(dir, "d.cfs", newIOContext(random()), false);
IndexInput expected = dir.openInput("d1", newIOContext(random()));
IndexInput actual = csr.openInput("d1", newIOContext(random()));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
actual.close();
expected = dir.openInput("d2", newIOContext(random));
actual = csr.openInput("d2", newIOContext(random));
expected = dir.openInput("d2", newIOContext(random()));
actual = csr.openInput("d2", newIOContext(random()));
assertSameStreams("d2", expected, actual);
assertSameSeekBehavior("d2", expected, actual);
expected.close();
@ -255,21 +255,21 @@ public class TestCompoundFile extends LuceneTestCase
createRandomFile(dir, segment + ".notIn2", 51);
// Now test
CompoundFileDirectory csw = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random), true);
CompoundFileDirectory csw = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random()), true);
final String data[] = new String[] {
".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3",
".big4", ".big5", ".big6", ".big7"
};
for (int i=0; i<data.length; i++) {
String fileName = segment + data[i];
dir.copy(csw, fileName, fileName, newIOContext(random));
dir.copy(csw, fileName, fileName, newIOContext(random()));
}
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random), false);
CompoundFileDirectory csr = new CompoundFileDirectory(dir, "test.cfs", newIOContext(random()), false);
for (int i=0; i<data.length; i++) {
IndexInput check = dir.openInput(segment + data[i], newIOContext(random));
IndexInput test = csr.openInput(segment + data[i], newIOContext(random));
IndexInput check = dir.openInput(segment + data[i], newIOContext(random()));
IndexInput test = csr.openInput(segment + data[i], newIOContext(random()));
assertSameStreams(data[i], check, test);
assertSameSeekBehavior(data[i], check, test);
test.close();
@ -285,11 +285,11 @@ public class TestCompoundFile extends LuceneTestCase
* the size of each file is 1000 bytes.
*/
private void setUp_2() throws IOException {
CompoundFileDirectory cw = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), true);
CompoundFileDirectory cw = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), true);
for (int i=0; i<20; i++) {
createSequenceFile(dir, "f" + i, (byte) 0, 2000);
String fileName = "f" + i;
dir.copy(cw, fileName, fileName, newIOContext(random));
dir.copy(cw, fileName, fileName, newIOContext(random()));
}
cw.close();
}
@ -336,16 +336,16 @@ public class TestCompoundFile extends LuceneTestCase
public void testClonedStreamsClosing() throws IOException {
setUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
// basic clone
IndexInput expected = dir.openInput("f11", newIOContext(random));
IndexInput expected = dir.openInput("f11", newIOContext(random()));
// this test only works for FSIndexInput
assertTrue(_TestHelper.isSimpleFSIndexInput(expected));
assertTrue(_TestHelper.isSimpleFSIndexInputOpen(expected));
IndexInput one = cr.openInput("f11", newIOContext(random));
IndexInput one = cr.openInput("f11", newIOContext(random()));
IndexInput two = (IndexInput) one.clone();
@ -388,14 +388,14 @@ public class TestCompoundFile extends LuceneTestCase
*/
public void testRandomAccess() throws IOException {
setUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
// Open two files
IndexInput e1 = dir.openInput("f11", newIOContext(random));
IndexInput e2 = dir.openInput("f3", newIOContext(random));
IndexInput e1 = dir.openInput("f11", newIOContext(random()));
IndexInput e2 = dir.openInput("f3", newIOContext(random()));
IndexInput a1 = cr.openInput("f11", newIOContext(random));
IndexInput a2 = dir.openInput("f3", newIOContext(random));
IndexInput a1 = cr.openInput("f11", newIOContext(random()));
IndexInput a2 = dir.openInput("f3", newIOContext(random()));
// Seek the first pair
e1.seek(100);
@ -467,11 +467,11 @@ public class TestCompoundFile extends LuceneTestCase
*/
public void testRandomAccessClones() throws IOException {
setUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
// Open two files
IndexInput e1 = cr.openInput("f11", newIOContext(random));
IndexInput e2 = cr.openInput("f3", newIOContext(random));
IndexInput e1 = cr.openInput("f11", newIOContext(random()));
IndexInput e2 = cr.openInput("f3", newIOContext(random()));
IndexInput a1 = (IndexInput) e1.clone();
IndexInput a2 = (IndexInput) e2.clone();
@ -544,11 +544,11 @@ public class TestCompoundFile extends LuceneTestCase
public void testFileNotFound() throws IOException {
setUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
// Open two files
try {
cr.openInput("bogus", newIOContext(random));
cr.openInput("bogus", newIOContext(random()));
fail("File not found");
} catch (IOException e) {
@ -562,8 +562,8 @@ public class TestCompoundFile extends LuceneTestCase
public void testReadPastEOF() throws IOException {
setUp_2();
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random), false);
IndexInput is = cr.openInput("f2", newIOContext(random));
CompoundFileDirectory cr = new CompoundFileDirectory(dir, "f.comp", newIOContext(random()), false);
IndexInput is = cr.openInput("f2", newIOContext(random()));
is.seek(is.length() - 10);
byte b[] = new byte[100];
is.readBytes(b, 0, 10);
@ -593,7 +593,7 @@ public class TestCompoundFile extends LuceneTestCase
* will correctly increment the file pointer.
*/
public void testLargeWrites() throws IOException {
IndexOutput os = dir.createOutput("testBufferStart.txt", newIOContext(random));
IndexOutput os = dir.createOutput("testBufferStart.txt", newIOContext(random()));
byte[] largeBuf = new byte[2048];
for (int i=0; i<largeBuf.length; i++) {
@ -615,13 +615,13 @@ public class TestCompoundFile extends LuceneTestCase
createSequenceFile(dir, "d1", (byte) 0, 15);
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
dir.copy(csw, "d1", "d1", newIOContext(random));
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
dir.copy(csw, "d1", "d1", newIOContext(random()));
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
IndexInput expected = dir.openInput("d1", newIOContext(random));
IndexInput actual = csr.openInput("d1", newIOContext(random));
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
IndexInput expected = dir.openInput("d1", newIOContext(random()));
IndexInput actual = csr.openInput("d1", newIOContext(random()));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
@ -634,10 +634,10 @@ public class TestCompoundFile extends LuceneTestCase
public void testAppend() throws IOException {
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
int size = 5 + random.nextInt(128);
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
int size = 5 + random().nextInt(128);
for (int j = 0; j < 2; j++) {
IndexOutput os = csw.createOutput("seg_" + j + "_foo.txt", newIOContext(random));
IndexOutput os = csw.createOutput("seg_" + j + "_foo.txt", newIOContext(random()));
for (int i = 0; i < size; i++) {
os.writeInt(i*j);
}
@ -647,14 +647,14 @@ public class TestCompoundFile extends LuceneTestCase
assertEquals("d.cfs", listAll[0]);
}
createSequenceFile(dir, "d1", (byte) 0, 15);
dir.copy(csw, "d1", "d1", newIOContext(random));
dir.copy(csw, "d1", "d1", newIOContext(random()));
String[] listAll = newDir.listAll();
assertEquals(1, listAll.length);
assertEquals("d.cfs", listAll[0]);
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
for (int j = 0; j < 2; j++) {
IndexInput openInput = csr.openInput("seg_" + j + "_foo.txt", newIOContext(random));
IndexInput openInput = csr.openInput("seg_" + j + "_foo.txt", newIOContext(random()));
assertEquals(size * 4, openInput.length());
for (int i = 0; i < size; i++) {
assertEquals(i*j, openInput.readInt());
@ -663,8 +663,8 @@ public class TestCompoundFile extends LuceneTestCase
openInput.close();
}
IndexInput expected = dir.openInput("d1", newIOContext(random));
IndexInput actual = csr.openInput("d1", newIOContext(random));
IndexInput expected = dir.openInput("d1", newIOContext(random()));
IndexInput actual = csr.openInput("d1", newIOContext(random()));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
@ -675,12 +675,12 @@ public class TestCompoundFile extends LuceneTestCase
public void testAppendTwice() throws IOException {
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
createSequenceFile(newDir, "d1", (byte) 0, 15);
IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
IndexOutput out = csw.createOutput("d.xyz", newIOContext(random()));
out.writeInt(0);
try {
newDir.copy(csw, "d1", "d1", newIOContext(random));
newDir.copy(csw, "d1", "d1", newIOContext(random()));
fail("file does already exist");
} catch (IllegalArgumentException e) {
//
@ -691,7 +691,7 @@ public class TestCompoundFile extends LuceneTestCase
csw.close();
CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
assertEquals(1, cfr.listAll().length);
assertEquals("d.xyz", cfr.listAll()[0]);
cfr.close();
@ -700,10 +700,10 @@ public class TestCompoundFile extends LuceneTestCase
public void testEmptyCFS() throws IOException {
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
csw.close();
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
CompoundFileDirectory csr = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
assertEquals(0, csr.listAll().length);
csr.close();
@ -712,32 +712,32 @@ public class TestCompoundFile extends LuceneTestCase
public void testReadNestedCFP() throws IOException {
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", newIOContext(random), true);
IndexOutput out = nested.createOutput("b.xyz", newIOContext(random));
IndexOutput out1 = nested.createOutput("b_1.xyz", newIOContext(random));
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", newIOContext(random()), true);
IndexOutput out = nested.createOutput("b.xyz", newIOContext(random()));
IndexOutput out1 = nested.createOutput("b_1.xyz", newIOContext(random()));
out.writeInt(0);
out1.writeInt(1);
out.close();
out1.close();
nested.close();
newDir.copy(csw, "b.cfs", "b.cfs", newIOContext(random));
newDir.copy(csw, "b.cfe", "b.cfe", newIOContext(random));
newDir.copy(csw, "b.cfs", "b.cfs", newIOContext(random()));
newDir.copy(csw, "b.cfe", "b.cfe", newIOContext(random()));
newDir.deleteFile("b.cfs");
newDir.deleteFile("b.cfe");
csw.close();
assertEquals(2, newDir.listAll().length);
csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
assertEquals(2, csw.listAll().length);
nested = new CompoundFileDirectory(csw, "b.cfs", newIOContext(random), false);
nested = new CompoundFileDirectory(csw, "b.cfs", newIOContext(random()), false);
assertEquals(2, nested.listAll().length);
IndexInput openInput = nested.openInput("b.xyz", newIOContext(random));
IndexInput openInput = nested.openInput("b.xyz", newIOContext(random()));
assertEquals(0, openInput.readInt());
openInput.close();
openInput = nested.openInput("b_1.xyz", newIOContext(random));
openInput = nested.openInput("b_1.xyz", newIOContext(random()));
assertEquals(1, openInput.readInt());
openInput.close();
nested.close();
@ -747,8 +747,8 @@ public class TestCompoundFile extends LuceneTestCase
public void testDoubleClose() throws IOException {
Directory newDir = newDirectory();
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), true);
IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), true);
IndexOutput out = csw.createOutput("d.xyz", newIOContext(random()));
out.writeInt(0);
out.close();
@ -756,8 +756,8 @@ public class TestCompoundFile extends LuceneTestCase
// close a second time - must have no effect according to Closeable
csw.close();
csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random), false);
IndexInput openInput = csw.openInput("d.xyz", newIOContext(random));
csw = new CompoundFileDirectory(newDir, "d.cfs", newIOContext(random()), false);
IndexInput openInput = csw.openInput("d.xyz", newIOContext(random()));
assertEquals(0, openInput.readInt());
openInput.close();
csw.close();
@ -776,22 +776,22 @@ public class TestCompoundFile extends LuceneTestCase
final int FILE_COUNT = atLeast(500);
for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
IndexOutput out = d.createOutput("file." + fileIdx, newIOContext(random));
IndexOutput out = d.createOutput("file." + fileIdx, newIOContext(random()));
out.writeByte((byte) fileIdx);
out.close();
}
final CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", newIOContext(random), true);
final CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", newIOContext(random()), true);
for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
final String fileName = "file." + fileIdx;
d.copy(cfd, fileName, fileName, newIOContext(random));
d.copy(cfd, fileName, fileName, newIOContext(random()));
}
cfd.close();
final IndexInput[] ins = new IndexInput[FILE_COUNT];
final CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", newIOContext(random), false);
final CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", newIOContext(random()), false);
for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {
ins[fileIdx] = cfr.openInput("file." + fileIdx, newIOContext(random));
ins[fileIdx] = cfr.openInput("file." + fileIdx, newIOContext(random()));
}
for(int fileIdx=0;fileIdx<FILE_COUNT;fileIdx++) {


@ -58,7 +58,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
isClose = true;
}
}
if (isDoFlush && !isClose && random.nextBoolean()) {
if (isDoFlush && !isClose && random().nextBoolean()) {
hitExc = true;
throw new IOException(Thread.currentThread().getName() + ": now failing during flush");
}
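The failure hook above throws only while flushing (never while closing) and only half the time, so some runs exercise the recovery path and others do not, all replayable from the seed. A standalone sketch of this probabilistic fault injection:

import java.io.IOException;
import java.util.Random;

// Standalone sketch of the fault injection above: fail a given operation with
// probability 1/2, remembering whether a failure was injected so the test can
// assert on the path it actually took.
class RandomFault {
  private final Random random;
  volatile boolean hitExc; // did we inject a failure this run?

  RandomFault(Random random) {
    this.random = random;
  }

  void maybeFail(String op) throws IOException {
    if (random.nextBoolean()) {
      hitExc = true;
      throw new IOException(Thread.currentThread().getName()
          + ": now failing during " + op);
    }
  }
}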
@ -73,7 +73,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure);
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
Document doc = new Document();
Field idField = newField("id", "", StringField.TYPE_STORED);
doc.add(idField);
@ -130,7 +130,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// start:
mp.setMinMergeDocs(1000);
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(mp));
Document doc = new Document();
@ -168,7 +168,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
public void testNoExtraFiles() throws IOException {
MockDirectoryWrapper directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for(int iter=0;iter<7;iter++) {
@ -187,7 +187,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Reopen
writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
}
@ -204,7 +204,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(100))
);
@ -237,7 +237,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Reopen
writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMergePolicy(newLogMergePolicy(100))
);


@ -37,7 +37,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
public void testSameFieldNumbersAcrossSegments() throws Exception {
for (int i = 0; i < 2; i++) {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
Document d1 = new Document();
d1.add(new Field("f1", "first field", StringField.TYPE_STORED));
@ -46,7 +46,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
if (i == 1) {
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
} else {
writer.commit();
}
@ -76,7 +76,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
assertEquals("f3", fis2.fieldInfo(2).name);
assertEquals("f4", fis2.fieldInfo(3).name);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.forceMerge(1);
writer.close();
@ -100,7 +100,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
public void testAddIndexes() throws Exception {
Directory dir1 = newDirectory();
Directory dir2 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
Document d1 = new Document();
d1.add(new Field("f1", "first field", TextField.TYPE_STORED));
@ -108,7 +108,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.addDocument(d1);
writer.close();
writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
Document d2 = new Document();
FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@ -121,7 +121,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.close();
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
writer.addIndexes(dir2);
writer.close();
@ -140,7 +140,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
assertEquals("f3", fis2.fieldInfo(2).name);
assertEquals("f4", fis2.fieldInfo(3).name);
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.forceMerge(1);
writer.close();
@ -166,7 +166,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
Directory dir = newDirectory();
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
NoMergePolicy.NO_COMPOUND_FILES));
Document d = new Document();
d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
@ -184,8 +184,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
: NoMergePolicy.COMPOUND_FILES));
Document d = new Document();
d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
@ -206,8 +206,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
: NoMergePolicy.COMPOUND_FILES));
Document d = new Document();
d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
@ -233,8 +233,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
: NoMergePolicy.COMPOUND_FILES));
writer.deleteDocuments(new Term("f1", "d1"));
// nuke the first segment entirely so that the segment with gaps is
@ -244,7 +244,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
}
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
new LogByteSizeMergePolicy()).setInfoStream(new FailOnNonBulkMergesInfoStream()));
writer.forceMerge(1);
writer.close();
@ -268,12 +268,12 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
int[][] docs = new int[NUM_DOCS][4];
for (int i = 0; i < docs.length; i++) {
for (int j = 0; j < docs[i].length;j++) {
docs[i][j] = random.nextInt(MAX_FIELDS);
docs[i][j] = random().nextInt(MAX_FIELDS);
}
}
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();

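Every hunk in this file makes the same mechanical change: the shared random field becomes a random() call. A minimal sketch of the accessor shape that change implies, using a hypothetical thread-local context class rather than the real runner API:

import java.util.Random;

// Minimal sketch (hypothetical names, not the framework's actual code) of a
// per-thread random() accessor. A single shared Random field breaks seed
// reproducibility once several tests or threads draw from it; resolving
// through a context on every call hands each thread its own seeded instance.
public class RandomAccessorSketch {
  // Stand-in for the runner's per-thread randomness context.
  private static final ThreadLocal<Random> CONTEXT = new ThreadLocal<Random>() {
    @Override
    protected Random initialValue() {
      return new Random(42L); // fixed seed here so a failing run can be replayed
    }
  };

  // Analogue of the random() method the hunks above switch to.
  public static Random random() {
    return CONTEXT.get();
  }

  public static void main(String[] args) {
    System.out.println("draw: " + random().nextInt(100));
  }
}

Call sites then read identically to the old field access except for the parentheses, which is why the diff is so uniform.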
View File

@ -65,7 +65,7 @@ public class TestCrash extends LuceneTestCase {
// This test relies on being able to open a reader before any commit
// happened, so we must create an initial commit just to allow that, but
// before any documents were added.
IndexWriter writer = initIndex(random, true);
IndexWriter writer = initIndex(random(), true);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
crash(writer);
IndexReader reader = IndexReader.open(dir);
@ -78,11 +78,11 @@ public class TestCrash extends LuceneTestCase {
// This test relies on being able to open a reader before any commit
// happened, so we must create an initial commit just to allow that, but
// before any documents were added.
IndexWriter writer = initIndex(random, true);
IndexWriter writer = initIndex(random(), true);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
dir.setPreventDoubleWrite(false);
crash(writer);
writer = initIndex(random, dir, false);
writer = initIndex(random(), dir, false);
writer.close();
IndexReader reader = IndexReader.open(dir);
@ -92,10 +92,10 @@ public class TestCrash extends LuceneTestCase {
}
public void testCrashAfterReopen() throws IOException {
IndexWriter writer = initIndex(random, false);
IndexWriter writer = initIndex(random(), false);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
writer.close();
writer = initIndex(random, dir, false);
writer = initIndex(random(), dir, false);
assertEquals(314, writer.maxDoc());
crash(writer);
@ -116,7 +116,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashAfterClose() throws IOException {
IndexWriter writer = initIndex(random, false);
IndexWriter writer = initIndex(random(), false);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
writer.close();
@ -137,7 +137,7 @@ public class TestCrash extends LuceneTestCase {
public void testCrashAfterCloseNoWait() throws IOException {
IndexWriter writer = initIndex(random, false);
IndexWriter writer = initIndex(random(), false);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
writer.close(false);

View File

@ -70,7 +70,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// NOTE: cannot use RandomIndexWriter because it
// sometimes commits:
IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
indexWriter.addDocument(getDocument());
// writes segments_1:
@ -103,7 +103,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// it doesn't know what to do with the created but empty
// segments_2 file
IndexWriter indexWriter = new IndexWriter(realDirectory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// currently the test fails above.
// however, to test the fix, the following lines should pass as well.

View File

@ -57,15 +57,15 @@ public class TestCustomNorms extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // can't set sim to checkindex yet
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
Similarity provider = new MySimProvider();
config.setSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
final LineFileDocs docs = new LineFileDocs(random);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
final LineFileDocs docs = new LineFileDocs(random());
int num = atLeast(100);
for (int i = 0; i < num; i++) {
Document doc = docs.nextDoc();
float nextFloat = random.nextFloat();
float nextFloat = random().nextFloat();
Field f = new Field(floatTestField, "" + nextFloat, TextField.TYPE_STORED);
f.setBoost(nextFloat);
@ -98,16 +98,16 @@ public class TestCustomNorms extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
dir.setCheckIndexOnClose(false); // can't set sim to checkindex yet
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
Similarity provider = new MySimProvider();
config.setSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
final LineFileDocs docs = new LineFileDocs(random);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
final LineFileDocs docs = new LineFileDocs(random());
int num = atLeast(100);
try {
for (int i = 0; i < num; i++) {
Document doc = docs.nextDoc();
float nextFloat = random.nextFloat();
float nextFloat = random().nextFloat();
Field f = new Field(exceptionTestField, "" + nextFloat,
TextField.TYPE_STORED);
f.setBoost(nextFloat);
@ -142,7 +142,7 @@ public class TestCustomNorms extends LuceneTestCase {
if (floatTestField.equals(field)) {
return new FloatEncodingBoostSimilarity();
} else if (exceptionTestField.equals(field)) {
return new RandomTypeSimilarity(random);
return new RandomTypeSimilarity(random());
} else {
return delegate;
}

View File

@ -203,7 +203,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = newDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random))
new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy);
MergePolicy mp = conf.getMergePolicy();
if (mp instanceof LogMergePolicy) {
@ -223,7 +223,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// past commits
lastDeleteTime = System.currentTimeMillis();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
if (mp instanceof LogMergePolicy) {
@ -310,7 +310,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
policy.dir = dir;
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
.setMergeScheduler(new SerialMergeScheduler());
MergePolicy mp = conf.getMergePolicy();
@ -331,7 +331,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
if (needsMerging) {
conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
if (mp instanceof LogMergePolicy) {
@ -379,7 +379,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
int preCount = dir.listAll().length;
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy));
writer.close();
int postCount = dir.listAll().length;
@ -403,7 +403,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setIndexDeletionPolicy(policy).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
@ -425,7 +425,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertTrue(lastCommit != null);
// Now add 1 doc and merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.forceMerge(1);
@ -434,7 +434,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(6, DirectoryReader.listCommits(dir).size());
// Now open writer on the commit just before merge:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
@ -447,7 +447,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(11, r.numDocs());
r.close();
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
@ -464,7 +464,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
r.close();
// Re-merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
writer.forceMerge(1);
writer.close();
@ -475,7 +475,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Now open writer on the commit just before merging,
// but this time keeping only the last commit:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Reader still sees fully merged index, because writer
@ -511,7 +511,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
MergePolicy mp = conf.getMergePolicy();
@ -524,7 +524,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
writer.close();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
if (mp instanceof LogMergePolicy) {
@ -564,7 +564,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int j=0;j<N+1;j++) {
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
MergePolicy mp = conf.getMergePolicy();
@ -625,7 +625,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
MergePolicy mp = conf.getMergePolicy();
@ -640,7 +640,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<N+1;i++) {
conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
mp = conf.getMergePolicy();
@ -653,7 +653,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
// this is a commit
writer.close();
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy)
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(dir, conf);
@ -667,7 +667,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
// This will not commit: there are no changes
// pending because we opened for "create":

View File

@ -56,8 +56,8 @@ public class TestDirectoryReader extends LuceneTestCase {
Document doc2 = new Document();
DocHelper.setupDoc(doc1);
DocHelper.setupDoc(doc2);
DocHelper.writeDoc(random, dir, doc1);
DocHelper.writeDoc(random, dir, doc2);
DocHelper.writeDoc(random(), dir, doc1);
DocHelper.writeDoc(random(), dir, doc2);
DirectoryReader reader = DirectoryReader.open(dir);
assertTrue(reader != null);
assertTrue(reader instanceof StandardDirectoryReader);
@ -79,11 +79,11 @@ public class TestDirectoryReader extends LuceneTestCase {
public void testMultiTermDocs() throws IOException {
Directory ramDir1=newDirectory();
addDoc(random, ramDir1, "test foo", true);
addDoc(random(), ramDir1, "test foo", true);
Directory ramDir2=newDirectory();
addDoc(random, ramDir2, "test blah", true);
addDoc(random(), ramDir2, "test blah", true);
Directory ramDir3=newDirectory();
addDoc(random, ramDir3, "test wow", true);
addDoc(random(), ramDir3, "test wow", true);
IndexReader[] readers1 = new IndexReader[]{DirectoryReader.open(ramDir1), DirectoryReader.open(ramDir3)};
IndexReader[] readers2 = new IndexReader[]{DirectoryReader.open(ramDir1), DirectoryReader.open(ramDir2), DirectoryReader.open(ramDir3)};
@ -93,7 +93,7 @@ public class TestDirectoryReader extends LuceneTestCase {
// test mixing up TermDocs and TermEnums from different readers.
TermsEnum te2 = MultiFields.getTerms(mr2, "body").iterator(null);
te2.seekCeil(new BytesRef("wow"));
DocsEnum td = _TestUtil.docs(random, mr2,
DocsEnum td = _TestUtil.docs(random(), mr2,
"body",
te2.term(),
MultiFields.getLiveDocs(mr2),
@ -102,7 +102,7 @@ public class TestDirectoryReader extends LuceneTestCase {
TermsEnum te3 = MultiFields.getTerms(mr3, "body").iterator(null);
te3.seekCeil(new BytesRef("wow"));
td = _TestUtil.docs(random, te3, MultiFields.getLiveDocs(mr3),
td = _TestUtil.docs(random(), te3, MultiFields.getLiveDocs(mr3),
td,
false);
@ -139,7 +139,7 @@ public class TestDirectoryReader extends LuceneTestCase {
public void testIsCurrent() throws Exception {
Directory d = newDirectory();
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDocumentWithFields(writer);
writer.close();
// set up reader:
@ -147,13 +147,13 @@ public class TestDirectoryReader extends LuceneTestCase {
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
@ -170,7 +170,7 @@ public class TestDirectoryReader extends LuceneTestCase {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
);
Document doc = new Document();
@ -196,7 +196,7 @@ public class TestDirectoryReader extends LuceneTestCase {
// add more documents
writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMergePolicy(newLogMergePolicy())
);
@ -314,7 +314,7 @@ public void testTermVectors() throws Exception {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy())
);
// want to get some more segments here
@ -346,12 +346,12 @@ public void testTermVectors() throws Exception {
d.close();
}
static void assertTermDocsCount(String msg,
void assertTermDocsCount(String msg,
IndexReader reader,
Term term,
int expected)
throws IOException {
DocsEnum tdocs = _TestUtil.docs(random, reader,
DocsEnum tdocs = _TestUtil.docs(random(), reader,
term.field(),
new BytesRef(term.text()),
MultiFields.getLiveDocs(reader),
@ -371,7 +371,7 @@ static void assertTermDocsCount(String msg,
Directory dir = newDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));
@ -380,7 +380,7 @@ static void assertTermDocsCount(String msg,
addDocumentWithTermVectorFields(writer);
}
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
doc.add(new StoredField("bin1", bin));
doc.add(new TextField("junk", "junk text"));
@ -402,7 +402,7 @@ static void assertTermDocsCount(String msg,
// force merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.close();
reader = DirectoryReader.open(dir);
@ -440,7 +440,7 @@ public void testFilesOpenClose() throws IOException {
// Create initial data set
File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
Directory dir = newFSDirectory(dirFile);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDoc(writer, "test");
writer.close();
dir.close();
@ -450,7 +450,7 @@ public void testFilesOpenClose() throws IOException {
dir = newFSDirectory(dirFile);
// Now create the data set again, just as before
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
addDoc(writer, "test");
writer.close();
dir.close();
@ -654,7 +654,7 @@ public void testFilesOpenClose() throws IOException {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
);
@ -674,7 +674,7 @@ public void testFilesOpenClose() throws IOException {
// Change the index
writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
@ -690,7 +690,7 @@ public void testFilesOpenClose() throws IOException {
r2.close();
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random))
new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.close();
@ -735,7 +735,7 @@ public void testFilesOpenClose() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
@ -762,7 +762,7 @@ public void testFilesOpenClose() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(10))
);
Document doc = new Document();
@ -797,7 +797,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", TextField.TYPE_UNSTORED));
doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", TextField.TYPE_UNSTORED));
@ -826,7 +826,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-1609: don't load terms index
public void testNoTermsIndex() throws Throwable {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
Document doc = new Document();
doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", TextField.TYPE_UNSTORED));
doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", TextField.TYPE_UNSTORED));
@ -845,7 +845,7 @@ public void testFilesOpenClose() throws IOException {
assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())).
setMergePolicy(newLogMergePolicy(10))
);
@ -875,7 +875,7 @@ public void testFilesOpenClose() throws IOException {
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.commit();
Document doc = new Document();
writer.addDocument(doc);
@ -920,7 +920,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-2812
public void testIndexExists() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.prepareCommit();
assertFalse(DirectoryReader.indexExists(dir));
@ -934,7 +934,7 @@ public void testFilesOpenClose() throws IOException {
// dict cache
public void testTotalTermFreqCached() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document d = new Document();
d.add(newField("f", "a a b", TextField.TYPE_UNSTORED));
writer.addDocument(d);
@ -955,7 +955,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-2474
public void testReaderFinishedListener() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
writer.addDocument(new Document());
writer.commit();
@ -988,7 +988,7 @@ public void testFilesOpenClose() throws IOException {
public void testOOBDocID() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(new Document());
DirectoryReader r = writer.getReader();
writer.close();
@ -1005,7 +1005,7 @@ public void testFilesOpenClose() throws IOException {
public void testTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.commit();
DirectoryReader r = DirectoryReader.open(dir);
@ -1019,7 +1019,7 @@ public void testFilesOpenClose() throws IOException {
public void testStressTryIncRef() throws CorruptIndexException, LockObtainFailedException, IOException, InterruptedException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.commit();
DirectoryReader r = DirectoryReader.open(dir);
@ -1027,7 +1027,7 @@ public void testFilesOpenClose() throws IOException {
IncThread[] threads = new IncThread[numThreads];
for (int i = 0; i < threads.length; i++) {
threads[i] = new IncThread(r, random);
threads[i] = new IncThread(r, random());
threads[i].start();
}
Thread.sleep(100);
@ -1071,7 +1071,7 @@ public void testFilesOpenClose() throws IOException {
public void testLoadCertainFields() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newField("field1", "foobar", StringField.TYPE_STORED));
doc.add(newField("field2", "foobaz", StringField.TYPE_STORED));

View File

@ -35,14 +35,10 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@ -51,7 +47,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public void testReopen() throws Exception {
final Directory dir1 = newDirectory();
createIndex(random, dir1, false);
createIndex(random(), dir1, false);
performDefaultTests(new TestReopen() {
@Override
@ -69,7 +65,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
final Directory dir2 = newDirectory();
createIndex(random, dir2, true);
createIndex(random(), dir2, true);
performDefaultTests(new TestReopen() {
@Override
@ -93,12 +89,12 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
// try this once with reopen once recreate, on both RAMDir and FSDir.
public void testCommitReopen () throws IOException {
Directory dir = newDirectory();
doTestReopenWithCommit(random, dir, true);
doTestReopenWithCommit(random(), dir, true);
dir.close();
}
public void testCommitRecreate () throws IOException {
Directory dir = newDirectory();
doTestReopenWithCommit(random, dir, false);
doTestReopenWithCommit(random(), dir, false);
dir.close();
}
@ -222,9 +218,9 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public void testThreadSafety() throws Exception {
final Directory dir = newDirectory();
// NOTE: this also controls the number of threads!
final int n = _TestUtil.nextInt(random, 20, 40);
final int n = _TestUtil.nextInt(random(), 20, 40);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}
@ -235,7 +231,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
@Override
protected void modifyIndex(int i) throws IOException {
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
modifier.addDocument(createDocument(n + i, 6));
modifier.close();
}
@ -249,7 +245,6 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
final List<ReaderCouple> readers = Collections.synchronizedList(new ArrayList<ReaderCouple>());
DirectoryReader firstReader = DirectoryReader.open(dir);
DirectoryReader reader = firstReader;
final Random rnd = random;
ReaderThread[] threads = new ReaderThread[n];
final Set<DirectoryReader> readersToClose = Collections.synchronizedSet(new HashSet<DirectoryReader>());
@ -273,6 +268,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
@Override
public void run() throws Exception {
Random rnd = LuceneTestCase.random();
while (!stopped) {
if (index % 2 == 0) {
// refresh reader synchronized
@ -301,7 +297,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
}
synchronized(this) {
wait(_TestUtil.nextInt(random, 1, 100));
wait(_TestUtil.nextInt(random(), 1, 100));
}
}
}
@ -311,6 +307,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
task = new ReaderThreadTask() {
@Override
public void run() throws Exception {
Random rnd = LuceneTestCase.random();
while (!stopped) {
int numReaders = readers.size();
if (numReaders > 0) {
@ -319,7 +316,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
synchronized(this) {
wait(_TestUtil.nextInt(random, 1, 100));
wait(_TestUtil.nextInt(random(), 1, 100));
}
}
}
@ -507,20 +504,20 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: modify index");
}
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.close();
break;
}
case 1: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.forceMerge(1);
w.close();
break;
}
case 2: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.addDocument(createDocument(101, 4));
w.forceMerge(1);
w.addDocument(createDocument(102, 4));
@ -529,7 +526,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
break;
}
case 3: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.addDocument(createDocument(101, 4));
w.close();
break;
@ -587,7 +584,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setIndexDeletionPolicy(new KeepAllCommits()).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))

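The two run() methods above now fetch their Random on the worker thread (Random rnd = LuceneTestCase.random();) instead of closing over the instance the test thread saw. A sketch of that contract, again with an illustrative thread-local context rather than the real API:

import java.util.Random;

// Sketch of the per-thread sharing contract: a worker thread obtains its own
// Random inside run() and never reuses the test thread's instance. The
// CONTEXT class below is illustrative, not the framework's.
public class PerThreadRandomSketch {
  private static final ThreadLocal<Random> CONTEXT = new ThreadLocal<Random>() {
    @Override
    protected Random initialValue() {
      return new Random(Thread.currentThread().getId()); // per-thread seed
    }
  };

  static Random random() {
    return CONTEXT.get();
  }

  public static void main(String[] args) throws InterruptedException {
    Thread worker = new Thread(new Runnable() {
      @Override
      public void run() {
        Random rnd = random(); // fetched on the worker thread, as in the hunks above
        System.out.println("worker draw: " + rnd.nextInt(1000));
      }
    });
    worker.start();
    worker.join();
  }
}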
View File

@ -110,7 +110,7 @@ public class TestDoc extends LuceneTestCase {
Directory directory = newFSDirectory(indexDir, null);
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
@ -145,7 +145,7 @@ public class TestDoc extends LuceneTestCase {
directory = newFSDirectory(indexDir, null);
writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
@ -189,7 +189,7 @@ public class TestDoc extends LuceneTestCase {
private SegmentInfo merge(Directory dir, SegmentInfo si1, SegmentInfo si2, String merged, boolean useCompoundFile)
throws Exception {
IOContext context = newIOContext(random);
IOContext context = newIOContext(random());
SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
@ -206,7 +206,7 @@ public class TestDoc extends LuceneTestCase {
false, codec, fieldInfos);
if (useCompoundFile) {
Collection<String> filesToDelete = IndexWriter.createCompoundFile(dir, merged + ".cfs", MergeState.CheckAbort.NONE, info, newIOContext(random));
Collection<String> filesToDelete = IndexWriter.createCompoundFile(dir, merged + ".cfs", MergeState.CheckAbort.NONE, info, newIOContext(random()));
info.setUseCompoundFile(true);
for (final String fileToDelete : filesToDelete)
si1.dir.deleteFile(fileToDelete);
@ -218,7 +218,7 @@ public class TestDoc extends LuceneTestCase {
private void printSegment(PrintWriter out, SegmentInfo si)
throws Exception {
SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
for (int i = 0; i < reader.numDocs(); i++)
out.println(reader.document(i));

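Helpers such as newIOContext and _TestUtil.docs already take the Random as an explicit argument, so each call site above only swaps random for random() and the helper stays ignorant of where the randomness comes from. A small illustration of that parameter-passing style (pickBufferSize is a made-up helper):

import java.util.Random;

// Illustration of the helper style seen above: utilities accept a Random
// parameter instead of reaching for a shared field, so the caller alone
// decides which context instance flows in. pickBufferSize is hypothetical.
public class HelperParamSketch {
  static int pickBufferSize(Random r) {
    // any power of two between 1 KiB and 32 KiB, chosen by the caller's Random
    return 1 << (10 + r.nextInt(6));
  }

  public static void main(String[] args) {
    Random random = new Random(0xCAFEL); // stand-in for the test's random()
    System.out.println("buffer: " + pickBufferSize(random));
  }
}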
View File

@ -34,7 +34,7 @@ public class TestDocCount extends LuceneTestCase {
assumeFalse("PreFlex codec does not support docCount statistic!",
"Lucene3x".equals(Codec.getDefault().getName()));
Directory dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random, dir);
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
for (int i = 0; i < numDocs; i++) {
iw.addDocument(doc());
@ -52,9 +52,9 @@ public class TestDocCount extends LuceneTestCase {
private Document doc() {
Document doc = new Document();
int numFields = _TestUtil.nextInt(random, 1, 10);
int numFields = _TestUtil.nextInt(random(), 1, 10);
for (int i = 0; i < numFields; i++) {
doc.add(newField("" + _TestUtil.nextInt(random, 'a', 'z'), "" + _TestUtil.nextInt(random, 'a', 'z'), StringField.TYPE_UNSTORED));
doc.add(newField("" + _TestUtil.nextInt(random(), 'a', 'z'), "" + _TestUtil.nextInt(random(), 'a', 'z'), StringField.TYPE_UNSTORED));
}
return doc;
}
@ -75,7 +75,7 @@ public class TestDocCount extends LuceneTestCase {
FixedBitSet visited = new FixedBitSet(ir.maxDoc());
TermsEnum te = terms.iterator(null);
while (te.next() != null) {
DocsEnum de = _TestUtil.docs(random, te, null, null, false);
DocsEnum de = _TestUtil.docs(random(), te, null, null, false);
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
visited.set(de.docID());
}

View File

@ -50,7 +50,7 @@ public class TestDocTermOrds extends LuceneTestCase {
public void testSimple() throws Exception {
Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
Field field = newField("field", "", TextField.TYPE_UNSTORED);
doc.add(field);
@ -96,7 +96,7 @@ public class TestDocTermOrds extends LuceneTestCase {
final int NUM_TERMS = atLeast(20);
final Set<BytesRef> terms = new HashSet<BytesRef>();
while(terms.size() < NUM_TERMS) {
final String s = _TestUtil.randomRealisticUnicodeString(random);
final String s = _TestUtil.randomRealisticUnicodeString(random());
//final String s = _TestUtil.randomSimpleString(random);
if (s.length() > 0) {
terms.add(new BytesRef(s));
@ -107,16 +107,16 @@ public class TestDocTermOrds extends LuceneTestCase {
final int NUM_DOCS = atLeast(100);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
// Sometimes swap in codec that impls ord():
if (random.nextInt(10) == 7) {
if (random().nextInt(10) == 7) {
// Make sure terms index has ords:
Codec codec = _TestUtil.alwaysPostingsFormat(PostingsFormat.forName("Lucene40WithOrds"));
conf.setCodec(codec);
}
final RandomIndexWriter w = new RandomIndexWriter(random, dir, conf);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);
final int[][] idToOrds = new int[NUM_DOCS][];
final Set<Integer> ordsForDocSet = new HashSet<Integer>();
@ -126,9 +126,9 @@ public class TestDocTermOrds extends LuceneTestCase {
doc.add(new IntField("id", id));
final int termCount = _TestUtil.nextInt(random, 0, 20*RANDOM_MULTIPLIER);
final int termCount = _TestUtil.nextInt(random(), 0, 20*RANDOM_MULTIPLIER);
while(ordsForDocSet.size() < termCount) {
ordsForDocSet.add(random.nextInt(termsArray.length));
ordsForDocSet.add(random().nextInt(termsArray.length));
}
final int[] ordsForDoc = new int[termCount];
int upto = 0;
@ -181,12 +181,12 @@ public class TestDocTermOrds extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
final Set<String> prefixes = new HashSet<String>();
final int numPrefix = _TestUtil.nextInt(random, 2, 7);
final int numPrefix = _TestUtil.nextInt(random(), 2, 7);
if (VERBOSE) {
System.out.println("TEST: use " + numPrefix + " prefixes");
}
while(prefixes.size() < numPrefix) {
prefixes.add(_TestUtil.randomRealisticUnicodeString(random));
prefixes.add(_TestUtil.randomRealisticUnicodeString(random()));
//prefixes.add(_TestUtil.randomSimpleString(random));
}
final String[] prefixesArray = prefixes.toArray(new String[prefixes.size()]);
@ -194,7 +194,7 @@ public class TestDocTermOrds extends LuceneTestCase {
final int NUM_TERMS = atLeast(20);
final Set<BytesRef> terms = new HashSet<BytesRef>();
while(terms.size() < NUM_TERMS) {
final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomRealisticUnicodeString(random);
final String s = prefixesArray[random().nextInt(prefixesArray.length)] + _TestUtil.randomRealisticUnicodeString(random());
//final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomSimpleString(random);
if (s.length() > 0) {
terms.add(new BytesRef(s));
@ -205,15 +205,15 @@ public class TestDocTermOrds extends LuceneTestCase {
final int NUM_DOCS = atLeast(100);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
// Sometimes swap in codec that impls ord():
if (random.nextInt(10) == 7) {
if (random().nextInt(10) == 7) {
Codec codec = _TestUtil.alwaysPostingsFormat(PostingsFormat.forName("Lucene40WithOrds"));
conf.setCodec(codec);
}
final RandomIndexWriter w = new RandomIndexWriter(random, dir, conf);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);
final int[][] idToOrds = new int[NUM_DOCS][];
final Set<Integer> ordsForDocSet = new HashSet<Integer>();
@ -223,9 +223,9 @@ public class TestDocTermOrds extends LuceneTestCase {
doc.add(new IntField("id", id));
final int termCount = _TestUtil.nextInt(random, 0, 20*RANDOM_MULTIPLIER);
final int termCount = _TestUtil.nextInt(random(), 0, 20*RANDOM_MULTIPLIER);
while(ordsForDocSet.size() < termCount) {
ordsForDocSet.add(random.nextInt(termsArray.length));
ordsForDocSet.add(random().nextInt(termsArray.length));
}
final int[] ordsForDoc = new int[termCount];
int upto = 0;
@ -302,7 +302,7 @@ public class TestDocTermOrds extends LuceneTestCase {
"field",
prefixRef,
Integer.MAX_VALUE,
_TestUtil.nextInt(random, 2, 10));
_TestUtil.nextInt(random(), 2, 10));
final int[] docIDToID = FieldCache.DEFAULT.getInts(r, "id", false);

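TestDocTermOrds rolls a ten-sided die (random().nextInt(10) == 7) to occasionally swap in the ords-capable postings format, so the rarer configuration still gets exercised somewhere across many seeded runs. A generic sketch of that idiom (the selection logic mirrors the test; the surrounding class is illustrative):

import java.util.Random;

// Generic sketch of the randomized-configuration idiom: with small
// probability, pick a rarer configuration so it keeps getting coverage
// across repeated randomized runs.
public class RandomConfigSketch {
  public static void main(String[] args) {
    Random random = new Random(); // stand-in for the test's random()
    String codec = (random.nextInt(10) == 7) ? "Lucene40WithOrds" : "default";
    System.out.println("codec for this run: " + codec);
  }
}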
View File

@ -18,16 +18,9 @@ package org.apache.lucene.index;
*/
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.*;
import java.util.Map.Entry;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
@ -109,30 +102,30 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
public void testIndexBytesNoDeletes() throws IOException {
runTestIndexBytes(writerConfig(random.nextBoolean()), false);
runTestIndexBytes(writerConfig(random().nextBoolean()), false);
}
public void testIndexBytesDeletes() throws IOException {
runTestIndexBytes(writerConfig(random.nextBoolean()), true);
runTestIndexBytes(writerConfig(random().nextBoolean()), true);
}
public void testIndexNumericsNoDeletes() throws IOException {
runTestNumerics(writerConfig(random.nextBoolean()), false);
runTestNumerics(writerConfig(random().nextBoolean()), false);
}
public void testIndexNumericsDeletes() throws IOException {
runTestNumerics(writerConfig(random.nextBoolean()), true);
runTestNumerics(writerConfig(random().nextBoolean()), true);
}
public void testAddIndexes() throws IOException {
int valuesPerIndex = 10;
List<Type> values = Arrays.asList(Type.values());
Collections.shuffle(values, random);
Collections.shuffle(values, random());
Type first = values.get(0);
Type second = values.get(1);
// index first index
Directory d_1 = newDirectory();
IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random.nextBoolean()));
IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random().nextBoolean()));
indexValues(w_1, valuesPerIndex, first, values, false, 7);
w_1.commit();
assertEquals(valuesPerIndex, w_1.maxDoc());
@ -140,17 +133,17 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// index second index
Directory d_2 = newDirectory();
IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random.nextBoolean()));
IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random().nextBoolean()));
indexValues(w_2, valuesPerIndex, second, values, false, 7);
w_2.commit();
assertEquals(valuesPerIndex, w_2.maxDoc());
_TestUtil.checkIndex(d_2);
Directory target = newDirectory();
IndexWriter w = new IndexWriter(target, writerConfig(random.nextBoolean()));
IndexWriter w = new IndexWriter(target, writerConfig(random().nextBoolean()));
DirectoryReader r_1 = DirectoryReader.open(w_1, true);
DirectoryReader r_2 = DirectoryReader.open(w_2, true);
if (random.nextBoolean()) {
if (random().nextBoolean()) {
w.addIndexes(d_1, d_2);
} else {
w.addIndexes(r_1, r_2);
@ -238,8 +231,8 @@ public class TestDocValuesIndexing extends LuceneTestCase {
private IndexWriterConfig writerConfig(boolean useCompoundFile) {
final IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
cfg.setMergePolicy(newLogMergePolicy(random));
new MockAnalyzer(random()));
cfg.setMergePolicy(newLogMergePolicy(random()));
LogMergePolicy policy = new LogDocMergePolicy();
cfg.setMergePolicy(policy);
policy.setUseCompoundFile(useCompoundFile);
@ -255,7 +248,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
// run in random order to test if fill works correctly during merges
Collections.shuffle(numVariantList, random);
Collections.shuffle(numVariantList, random());
for (Type val : numVariantList) {
FixedBitSet deleted = indexValues(w, numValues, val, numVariantList,
withDeletions, 7);
@ -331,7 +324,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexWriter w = new IndexWriter(d, cfg);
final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
// run in random order to test if fill works correctly during merges
Collections.shuffle(byteVariantList, random);
Collections.shuffle(byteVariantList, random());
final int numValues = 50 + atLeast(10);
for (Type byteIndexValue : byteVariantList) {
List<Closeable> closeables = new ArrayList<Closeable>();
@ -414,11 +407,11 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testGetArrayNumerics() throws CorruptIndexException, IOException {
Directory d = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
final int numValues = 50 + atLeast(10);
final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
Collections.shuffle(numVariantList, random);
Collections.shuffle(numVariantList, random());
for (Type val : numVariantList) {
indexValues(w, numValues, val, numVariantList,
false, 7);
@ -502,7 +495,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testGetArrayBytes() throws CorruptIndexException, IOException {
Directory d = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
final int numValues = 50 + atLeast(10);
// only single byte fixed straight supports getArray()
@ -542,7 +535,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
@SuppressWarnings("fallthrough")
private Source getSource(DocValues values) throws IOException {
// getSource uses cache internally
switch(random.nextInt(5)) {
switch(random().nextInt(5)) {
case 3:
return values.load();
case 2:
@ -656,17 +649,17 @@ public class TestDocValuesIndexing extends LuceneTestCase {
w.addDocument(doc);
if (i % 7 == 0) {
if (withDeletions && random.nextBoolean()) {
Type val = valueVarList.get(random.nextInt(1 + valueVarList
if (withDeletions && random().nextBoolean()) {
Type val = valueVarList.get(random().nextInt(1 + valueVarList
.indexOf(valueType)));
final int randInt = val == valueType ? random.nextInt(1 + i) : random
final int randInt = val == valueType ? random().nextInt(1 + i) : random()
.nextInt(numValues);
w.deleteDocuments(new Term("id", val.name() + "_" + randInt));
if (val == valueType) {
deleted.set(randInt);
}
}
if (random.nextInt(10) == 0) {
if (random().nextInt(10) == 0) {
w.commit();
}
}
@ -674,7 +667,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
w.commit();
// TODO test multi seg with deletions
if (withDeletions || random.nextBoolean()) {
if (withDeletions || random().nextBoolean()) {
w.forceMerge(1, true);
}
return deleted;
@ -682,7 +675,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testMultiValuedDocValuesField() throws Exception {
Directory d = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, d);
RandomIndexWriter w = new RandomIndexWriter(random(), d);
Document doc = new Document();
DocValuesField f = new DocValuesField("field", 17, Type.VAR_INTS);
// Index doc values are single-valued so we should not
@ -709,7 +702,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testDifferentTypedDocValuesField() throws Exception {
Directory d = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, d);
RandomIndexWriter w = new RandomIndexWriter(random(), d);
Document doc = new Document();
// Index doc values are single-valued so we should not
// be able to add the same field more than once:
@ -740,17 +733,17 @@ public class TestDocValuesIndexing extends LuceneTestCase {
boolean fixed = type == Type.BYTES_FIXED_SORTED;
final Directory d = newDirectory();
IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, cfg);
int numDocs = atLeast(100);
BytesRefHash hash = new BytesRefHash();
Map<String, String> docToString = new HashMap<String, String>();
int len = 1 + random.nextInt(50);
int len = 1 + random().nextInt(50);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(newField("id", "" + i, TextField.TYPE_STORED));
String string =fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random,
len) : _TestUtil.randomRealisticUnicodeString(random, 1, len);
String string =fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
BytesRef br = new BytesRef(string);
doc.add(new DocValuesField("field", br, type));
hash.add(br);
@ -777,8 +770,8 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
String id = "" + i + numDocs;
doc.add(newField("id", id, TextField.TYPE_STORED));
String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random,
len) : _TestUtil.randomRealisticUnicodeString(random, 1, len);
String string = fixed ? _TestUtil.randomFixedByteLengthUnicodeString(random(),
len) : _TestUtil.randomRealisticUnicodeString(random(), 1, len);
BytesRef br = new BytesRef(string);
hash.add(br);
docToString.put(id, string);
@ -826,6 +819,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
public void testWithThreads() throws Exception {
Random random = random();
final int NUM_DOCS = atLeast(100);
final Directory dir = newDirectory();
final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
@ -883,12 +877,13 @@ public class TestDocValuesIndexing extends LuceneTestCase {
final DocValues.Source docIDToID = sr.docValues("id").getSource();
final int NUM_THREADS = _TestUtil.nextInt(random, 1, 10);
final int NUM_THREADS = _TestUtil.nextInt(random(), 1, 10);
Thread[] threads = new Thread[NUM_THREADS];
for(int thread=0;thread<NUM_THREADS;thread++) {
threads[thread] = new Thread() {
@Override
public void run() {
Random random = random();
final DocValues.Source stringDVSource;
final DocValues.Source stringDVDirectSource;
try {
@ -934,7 +929,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// LUCENE-3870
public void testLengthPrefixAcrossTwoPages() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
byte[] bytes = new byte[32764];
BytesRef b = new BytesRef();
@ -960,4 +955,4 @@ public class TestDocValuesIndexing extends LuceneTestCase {
w.close();
d.close();
}
}
}

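testWithThreads above takes a lighter path: it snapshots the context into a local (Random random = random();) at the top of the method and again inside each thread's run(), so a body written against the old field name compiles unchanged. A sketch of that shadowing trick, with an illustrative context class:

import java.util.Random;

// Sketch of the local-shadowing migration used in testWithThreads: bind the
// context Random to a local named "random" once, and code written against
// the old shared field keeps working verbatim. CONTEXT is illustrative.
public class ShadowingSketch {
  private static final ThreadLocal<Random> CONTEXT = new ThreadLocal<Random>() {
    @Override
    protected Random initialValue() {
      return new Random(7L);
    }
  };

  static Random random() {
    return CONTEXT.get();
  }

  public static void main(String[] args) {
    Random random = random(); // one lookup; the loop below reads like the old code
    for (int i = 0; i < 3; i++) {
      System.out.println(random.nextInt(10));
    }
  }
}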
View File

@ -38,7 +38,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
fieldName = "field" + random.nextInt();
fieldName = "field" + random().nextInt();
}
/**
@ -46,8 +46,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
*/
public void testPositionsSimple() throws IOException {
Directory directory = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 39; i++) {
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
@ -72,7 +72,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
if (atomicReaderContext.reader().maxDoc() == 0) {
continue;
}
final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader().maxDoc()));
final int advance = docsAndPosEnum.advance(random().nextInt(atomicReaderContext.reader().maxDoc()));
do {
String msg = "Advanced to: " + advance + " current doc: "
+ docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
@ -104,11 +104,11 @@ public class TestDocsAndPositions extends LuceneTestCase {
*/
public void testRandomPositions() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(47);
int max = 1051;
int term = random.nextInt(max);
int term = random().nextInt(max);
Integer[][] positionsInDoc = new Integer[numDocs][];
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@ -118,7 +118,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
StringBuilder builder = new StringBuilder();
int num = atLeast(131);
for (int j = 0; j < num; j++) {
int nextInt = random.nextInt(max);
int nextInt = random().nextInt(max);
builder.append(nextInt).append(" ");
if (nextInt == term) {
positions.add(Integer.valueOf(j));
@ -148,10 +148,10 @@ public class TestDocsAndPositions extends LuceneTestCase {
int initDoc = 0;
int maxDoc = atomicReaderContext.reader().maxDoc();
// initially advance or do next doc
if (random.nextBoolean()) {
if (random().nextBoolean()) {
initDoc = docsAndPosEnum.nextDoc();
} else {
initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
initDoc = docsAndPosEnum.advance(random().nextInt(maxDoc));
}
// now run through the scorer and check if all positions are there...
do {
@ -163,8 +163,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
assertEquals(pos.length, docsAndPosEnum.freq());
// number of positions read should be random - don't read all of them
// always
final int howMany = random.nextInt(20) == 0 ? pos.length
- random.nextInt(pos.length) : pos.length;
final int howMany = random().nextInt(20) == 0 ? pos.length
- random().nextInt(pos.length) : pos.length;
for (int j = 0; j < howMany; j++) {
assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
+ docID + " base: " + atomicReaderContext.docBase
@ -172,9 +172,9 @@ public class TestDocsAndPositions extends LuceneTestCase {
+ usePayload*/, pos[j].intValue(), docsAndPosEnum.nextPosition());
}
if (random.nextInt(10) == 0) { // once in a while, advance
if (random().nextInt(10) == 0) { // once in a while, advance
docsAndPosEnum
.advance(docID + 1 + random.nextInt((maxDoc - docID)));
.advance(docID + 1 + random().nextInt((maxDoc - docID)));
}
} while (docsAndPosEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
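
The loop above either steps with nextDoc() or jumps with advance(target), which must return the first doc >= target (or exhaust). A toy iterator over a sorted int[] — an illustration of the contract, not Lucene's DocIdSetIterator — behaves the same way:

public class ToyDocIdIterator {
  public static final int NO_MORE_DOCS = Integer.MAX_VALUE;
  private final int[] docs;
  private int idx = -1;

  public ToyDocIdIterator(int[] sortedDocs) {
    this.docs = sortedDocs;
  }

  public int docID() {
    if (idx < 0) return -1;                  // not positioned yet
    if (idx >= docs.length) return NO_MORE_DOCS;
    return docs[idx];
  }

  public int nextDoc() {
    idx++;
    return docID();
  }

  public int advance(int target) {
    do { idx++; } while (idx < docs.length && docs[idx] < target);
    return docID(); // first doc >= target, or NO_MORE_DOCS
  }

  public static void main(String[] args) {
    ToyDocIdIterator it = new ToyDocIdIterator(new int[] {2, 5, 9});
    System.out.println(it.nextDoc());   // 2
    System.out.println(it.advance(6));  // 9: first doc >= 6
    System.out.println(it.nextDoc());   // NO_MORE_DOCS
  }
}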
@ -187,11 +187,11 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testRandomDocs() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(49);
int max = 15678;
int term = random.nextInt(max);
int term = random().nextInt(max);
int[] freqInDoc = new int[numDocs];
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@ -199,7 +199,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
Document doc = new Document();
StringBuilder builder = new StringBuilder();
for (int j = 0; j < 199; j++) {
int nextInt = random.nextInt(max);
int nextInt = random().nextInt(max);
builder.append(nextInt).append(' ');
if (nextInt == term) {
freqInDoc[i]++;
@ -219,7 +219,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
AtomicReaderContext[] leaves = topReaderContext.leaves();
for (AtomicReaderContext context : leaves) {
int maxDoc = context.reader().maxDoc();
DocsEnum docsEnum = _TestUtil.docs(random, context.reader(), fieldName, bytes, null, null, true);
DocsEnum docsEnum = _TestUtil.docs(random(), context.reader(), fieldName, bytes, null, null, true);
if (findNext(freqInDoc, context.docBase, context.docBase + maxDoc) == Integer.MAX_VALUE) {
assertNull(docsEnum);
continue;
@ -230,7 +230,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
if (freqInDoc[context.docBase + j] != 0) {
assertEquals(j, docsEnum.docID());
assertEquals(docsEnum.freq(), freqInDoc[context.docBase +j]);
if (i % 2 == 0 && random.nextInt(10) == 0) {
if (i % 2 == 0 && random().nextInt(10) == 0) {
int next = findNext(freqInDoc, context.docBase+j+1, context.docBase + maxDoc) - context.docBase;
int advancedTo = docsEnum.advance(next);
if (next >= maxDoc) {
@ -267,8 +267,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
*/
public void testLargeNumberOfPositions() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
int howMany = 1000;
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
customType.setOmitNorms(true);
@ -304,10 +304,10 @@ public class TestDocsAndPositions extends LuceneTestCase {
int initDoc = 0;
int maxDoc = atomicReaderContext.reader().maxDoc();
// initially advance or do next doc
if (random.nextBoolean()) {
if (random().nextBoolean()) {
initDoc = docsAndPosEnum.nextDoc();
} else {
initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
initDoc = docsAndPosEnum.advance(random().nextInt(maxDoc));
}
String msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
assertEquals(howMany / 2, docsAndPosEnum.freq());
@ -324,13 +324,13 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testDocsEnumStart() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newField("foo", "bar", StringField.TYPE_UNSTORED));
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
AtomicReader r = getOnlySegmentReader(reader);
DocsEnum disi = _TestUtil.docs(random, r, "foo", new BytesRef("bar"), null, null, false);
DocsEnum disi = _TestUtil.docs(random(), r, "foo", new BytesRef("bar"), null, null, false);
int docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -338,7 +338,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
// now reuse and check again
TermsEnum te = r.terms("foo").iterator(null);
assertTrue(te.seekExact(new BytesRef("bar"), true));
disi = _TestUtil.docs(random, te, null, disi, false);
disi = _TestUtil.docs(random(), te, null, disi, false);
docid = disi.docID();
assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -349,7 +349,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testDocsAndPositionsEnumStart() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(newField("foo", "bar", TextField.TYPE_UNSTORED));
writer.addDocument(doc);

View File

@ -59,13 +59,13 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testAddDocument() throws Exception {
Document testDoc = new Document();
DocHelper.setupDoc(testDoc);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(testDoc);
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
assertTrue(reader != null);
Document doc = reader.document(0);
assertTrue(doc != null);
@ -126,7 +126,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
"repeated", new BytesRef("repeated"), false);
@ -198,7 +198,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, reader.getLiveDocs(), "f1", new BytesRef("a"), false);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -216,7 +216,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testPreAnalyzedField() throws IOException {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new TextField("preanalyzed", new TokenStream() {
@ -242,7 +242,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
DocsAndPositionsEnum termPositions = reader.termPositionsEnum(reader.getLiveDocs(), "preanalyzed", new BytesRef("term1"), false);
assertTrue(termPositions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
@ -280,7 +280,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(newField("f2", "v2", StringField.TYPE_STORED));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(doc);
writer.close();
@ -320,7 +320,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(newField("f2", "v2", customType2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.addDocument(doc);
writer.forceMerge(1); // be sure to have a single segment
writer.close();

View File

@ -36,10 +36,10 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
public void testUpdateDelteSlices() {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
Integer[] ids = new Integer[size];
for (int i = 0; i < ids.length; i++) {
ids[i] = random.nextInt();
ids[i] = random().nextInt();
}
DeleteSlice slice1 = queue.newSlice();
DeleteSlice slice2 = queue.newSlice();
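
The size computed above scales with RANDOM_MULTIPLIER so the same test body runs longer under nightly settings. A hedged sketch of that scaling (reading the multiplier from a system property is an assumption about where the value comes from, not the framework's exact mechanism):

import java.util.Random;

public class TestSizeSketch {
  // assumption: multiplier comes from a property, defaulting to 1 like a local run
  static final int RANDOM_MULTIPLIER = Integer.getInteger("tests.multiplier", 1);

  public static void main(String[] args) {
    Random random = new Random();
    int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
    System.out.println("iterations: " + size); // 200..699 locally, larger when multiplied
  }
}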
@ -54,14 +54,14 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
Term[] term = new Term[] {new Term("id", i.toString())};
uniqueValues.add(term[0]);
queue.addDelete(term);
if (random.nextInt(20) == 0 || j == ids.length - 1) {
if (random().nextInt(20) == 0 || j == ids.length - 1) {
queue.updateSlice(slice1);
assertTrue(slice1.isTailItem(term));
slice1.apply(bd1, j);
assertAllBetween(last1, j, bd1, ids);
last1 = j + 1;
}
if (random.nextInt(10) == 5 || j == ids.length - 1) {
if (random().nextInt(10) == 5 || j == ids.length - 1) {
queue.updateSlice(slice2);
assertTrue(slice2.isTailItem(term));
slice2.apply(bd2, j);
@ -96,12 +96,12 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
assertFalse(queue.anyChanges());
queue.clear();
assertFalse(queue.anyChanges());
final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
int termsSinceFreeze = 0;
int queriesSinceFreeze = 0;
for (int i = 0; i < size; i++) {
Term term = new Term("id", "" + i);
if (random.nextInt(10) == 0) {
if (random().nextInt(10) == 0) {
queue.addDelete(new TermQuery(term));
queriesSinceFreeze++;
} else {
@ -109,7 +109,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
termsSinceFreeze++;
}
assertTrue(queue.anyChanges());
if (random.nextInt(10) == 0) {
if (random().nextInt(10) == 0) {
queue.clear();
queue.tryApplyGlobalSlice();
assertFalse(queue.anyChanges());
@ -120,12 +120,12 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
public void testAnyChanges() {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
final int size = 200 + random.nextInt(500) * RANDOM_MULTIPLIER;
final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
int termsSinceFreeze = 0;
int queriesSinceFreeze = 0;
for (int i = 0; i < size; i++) {
Term term = new Term("id", "" + i);
if (random.nextInt(10) == 0) {
if (random().nextInt(10) == 0) {
queue.addDelete(new TermQuery(term));
queriesSinceFreeze++;
} else {
@ -133,7 +133,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
termsSinceFreeze++;
}
assertTrue(queue.anyChanges());
if (random.nextInt(5) == 0) {
if (random().nextInt(5) == 0) {
FrozenBufferedDeletes freezeGlobalBuffer = queue
.freezeGlobalBuffer(null);
assertEquals(termsSinceFreeze, freezeGlobalBuffer.termCount);
@ -174,15 +174,15 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
public void testStressDeleteQueue() throws InterruptedException {
DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
Set<Term> uniqueValues = new HashSet<Term>();
final int size = 10000 + random.nextInt(500) * RANDOM_MULTIPLIER;
final int size = 10000 + random().nextInt(500) * RANDOM_MULTIPLIER;
Integer[] ids = new Integer[size];
for (int i = 0; i < ids.length; i++) {
ids[i] = random.nextInt();
ids[i] = random().nextInt();
uniqueValues.add(new Term("id", ids[i].toString()));
}
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger index = new AtomicInteger(0);
final int numThreads = 2 + random.nextInt(5);
final int numThreads = 2 + random().nextInt(5);
UpdateThread[] threads = new UpdateThread[numThreads];
for (int i = 0; i < threads.length; i++) {
threads[i] = new UpdateThread(queue, index, ids, latch);

View File

@ -68,11 +68,11 @@ public class TestDuelingCodecs extends LuceneTestCase {
// so this would make assertEquals complicated.
leftCodec = Codec.forName("SimpleText");
rightCodec = new RandomCodec(random, false);
rightCodec = new RandomCodec(random(), false);
leftDir = newDirectory();
rightDir = newDirectory();
long seed = random.nextLong();
long seed = random().nextLong();
// must use same seed because of random payloads, etc
Analyzer leftAnalyzer = new MockAnalyzer(new Random(seed));
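
Seeding two Randoms with the same value makes both analyzers see identical "random" input, which is what lets the left and right indexes be compared field by field. A minimal demonstration of that property:

import java.util.Random;

public class SameSeedSketch {
  public static void main(String[] args) {
    long seed = new Random().nextLong(); // stand-in for the test's random().nextLong()
    Random left = new Random(seed);
    Random right = new Random(seed);
    for (int i = 0; i < 5; i++) {
      if (left.nextInt() != right.nextInt()) {
        throw new AssertionError("streams diverged"); // never happens: same seed, same sequence
      }
    }
    System.out.println("left and right streams match");
  }
}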
@ -212,7 +212,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
if (deep) {
int numIntersections = atLeast(3);
for (int i = 0; i < numIntersections; i++) {
String re = AutomatonTestUtil.randomRegexp(random);
String re = AutomatonTestUtil.randomRegexp(random());
CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton());
if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) {
// TODO: test start term too
@ -249,7 +249,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
*/
public void assertTermsEnum(TermsEnum leftTermsEnum, TermsEnum rightTermsEnum, boolean deep) throws Exception {
BytesRef term;
Bits randomBits = new RandomBits(leftReader.maxDoc(), random.nextDouble(), random);
Bits randomBits = new RandomBits(leftReader.maxDoc(), random().nextDouble(), random());
DocsAndPositionsEnum leftPositions = null;
DocsAndPositionsEnum rightPositions = null;
DocsEnum leftDocs = null;
@ -383,13 +383,13 @@ public class TestDuelingCodecs extends LuceneTestCase {
int skipInterval = 16;
while (true) {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
// nextDoc()
docid = leftDocs.nextDoc();
assertEquals(info, docid, rightDocs.nextDoc());
} else {
// advance()
int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random.nextGaussian() * averageGap));
int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
docid = leftDocs.advance(skip);
assertEquals(info, docid, rightDocs.advance(skip));
}
@ -418,13 +418,13 @@ public class TestDuelingCodecs extends LuceneTestCase {
int skipInterval = 16;
while (true) {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
// nextDoc()
docid = leftDocs.nextDoc();
assertEquals(info, docid, rightDocs.nextDoc());
} else {
// advance()
int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random.nextGaussian() * averageGap));
int skip = docid + (int) Math.ceil(Math.abs(skipInterval + random().nextGaussian() * averageGap));
docid = leftDocs.advance(skip);
assertEquals(info, docid, rightDocs.advance(skip));
}

View File

@ -52,7 +52,7 @@ public class TestFieldInfos extends LuceneTestCase {
assertTrue(fieldInfos.size() == DocHelper.all.size()); //this is all b/c we are using the no-arg constructor
IndexOutput output = dir.createOutput(filename, newIOContext(random));
IndexOutput output = dir.createOutput(filename, newIOContext(random()));
assertTrue(output != null);
//Use a RAMOutputStream
@ -120,34 +120,34 @@ public class TestFieldInfos extends LuceneTestCase {
}
try {
readOnly.addOrUpdate("bogus", random.nextBoolean());
readOnly.addOrUpdate("bogus", random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean());
readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean(),
random.nextBoolean());
readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean(),
random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
readOnly.addOrUpdate("bogus", random.nextBoolean(), random.nextBoolean(),
random.nextBoolean(),
random.nextBoolean(), random.nextBoolean() ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null);
readOnly.addOrUpdate("bogus", random().nextBoolean(), random().nextBoolean(),
random().nextBoolean(),
random().nextBoolean(), random().nextBoolean() ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null);
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
}
try {
readOnly.addOrUpdate(Arrays.asList("a", "b", "c"), random.nextBoolean());
readOnly.addOrUpdate(Arrays.asList("a", "b", "c"), random().nextBoolean());
fail("instance should be read only");
} catch (IllegalStateException e) {
// expected
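
Each try/fail/catch block above asserts that a mutation on the read-only view throws. The idiom in isolation (ReadOnlyBox is a made-up stand-in for the read-only FieldInfos instance):

public class ExpectedExceptionSketch {
  static class ReadOnlyBox {
    void mutate() { throw new IllegalStateException("read-only"); }
  }

  public static void main(String[] args) {
    ReadOnlyBox readOnly = new ReadOnlyBox();
    try {
      readOnly.mutate();
      throw new AssertionError("instance should be read only"); // reached only if no exception
    } catch (IllegalStateException e) {
      // expected: the instance rejected the mutation
    }
  }
}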

View File

@ -57,7 +57,7 @@ public class TestFieldsReader extends LuceneTestCase {
DocHelper.setupDoc(testDoc);
_TestUtil.add(testDoc, fieldInfos);
dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(testDoc);
@ -195,7 +195,7 @@ public class TestFieldsReader extends LuceneTestCase {
try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
writer.forceMerge(1);
@ -232,7 +232,7 @@ public class TestFieldsReader extends LuceneTestCase {
public void testNumericField() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, dir);
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int numDocs = atLeast(500);
final Number[] answers = new Number[numDocs];
final NumericType[] typeAnswers = new NumericType[numDocs];
@ -242,16 +242,16 @@ public class TestFieldsReader extends LuceneTestCase {
final Field sf;
final Number answer;
final NumericType typeAnswer;
if (random.nextBoolean()) {
if (random().nextBoolean()) {
// float/double
if (random.nextBoolean()) {
final float f = random.nextFloat();
if (random().nextBoolean()) {
final float f = random().nextFloat();
answer = Float.valueOf(f);
nf = new FloatField("nf", f);
sf = new StoredField("nf", f);
typeAnswer = NumericType.FLOAT;
} else {
final double d = random.nextDouble();
final double d = random().nextDouble();
answer = Double.valueOf(d);
nf = new DoubleField("nf", d);
sf = new StoredField("nf", d);
@ -259,14 +259,14 @@ public class TestFieldsReader extends LuceneTestCase {
}
} else {
// int/long
if (random.nextBoolean()) {
final int i = random.nextInt();
if (random().nextBoolean()) {
final int i = random().nextInt();
answer = Integer.valueOf(i);
nf = new IntField("nf", i);
sf = new StoredField("nf", i);
typeAnswer = NumericType.INT;
} else {
final long l = random.nextLong();
final long l = random().nextLong();
answer = Long.valueOf(l);
nf = new LongField("nf", l);
sf = new StoredField("nf", l);
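
The nested nextBoolean() calls above pick one of four numeric field types with equal probability. The selection pattern on its own (the enum and method names are illustrative only):

import java.util.Random;

public class RandomTypeSketch {
  enum NumType { FLOAT, DOUBLE, INT, LONG }

  static NumType pick(Random random) {
    if (random.nextBoolean()) {
      // float/double branch
      return random.nextBoolean() ? NumType.FLOAT : NumType.DOUBLE;
    } else {
      // int/long branch
      return random.nextBoolean() ? NumType.INT : NumType.LONG;
    }
  }

  public static void main(String[] args) {
    Random random = new Random(42);
    for (int i = 0; i < 4; i++) {
      System.out.println(pick(random)); // each of the four types with probability 1/4
    }
  }
}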
@ -302,7 +302,7 @@ public class TestFieldsReader extends LuceneTestCase {
public void testIndexedBit() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, dir);
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
Document doc = new Document();
FieldType onlyStored = new FieldType();
onlyStored.setStored(true);

View File

@ -129,7 +129,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
public void testFilterIndexReader() throws Exception {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document d1 = new Document();
d1.add(newField("default","one two", TextField.TYPE_STORED));
@ -150,7 +150,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
// We mess with the postings so this can fail:
((MockDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader reader = new TestReader(IndexReader.open(directory));
writer.addIndexes(reader);
writer.close();

View File

@ -33,7 +33,7 @@ public class TestFlex extends LuceneTestCase {
IndexWriter w = new IndexWriter(
d,
new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random)).
new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random())).
setMaxBufferedDocs(7)
);
@ -65,7 +65,7 @@ public class TestFlex extends LuceneTestCase {
public void testTermOrd() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
new MockAnalyzer(random())).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
Document doc = new Document();
doc.add(newField("f", "a b c", TextField.TYPE_UNSTORED));
w.addDocument(doc);

View File

@ -39,7 +39,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
@BeforeClass
public static void beforeClass() throws Exception {
lineDocFile = new LineFileDocs(random, defaultCodecSupportsDocValues());
lineDocFile = new LineFileDocs(random(), defaultCodecSupportsDocValues());
}
@AfterClass
@ -51,14 +51,14 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
public void testFlushByRam() throws CorruptIndexException,
LockObtainFailedException, IOException, InterruptedException {
final double ramBuffer = (TEST_NIGHTLY ? 1 : 10) + atLeast(2)
+ random.nextDouble();
runFlushByRam(1 + random.nextInt(TEST_NIGHTLY ? 5 : 1), ramBuffer, false);
+ random().nextDouble();
runFlushByRam(1 + random().nextInt(TEST_NIGHTLY ? 5 : 1), ramBuffer, false);
}
public void testFlushByRamLargeBuffer() throws CorruptIndexException,
LockObtainFailedException, IOException, InterruptedException {
// with a 256 mb ram buffer we should never stall
runFlushByRam(1 + random.nextInt(TEST_NIGHTLY ? 5 : 1), 256.d, true);
runFlushByRam(1 + random().nextInt(TEST_NIGHTLY ? 5 : 1), 256.d, true);
}
protected void runFlushByRam(int numThreads, double maxRamMB,
@ -69,7 +69,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
Directory dir = newDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
new MockAnalyzer(random())).setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
numDWPT);
@ -125,7 +125,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
Directory dir = newDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
new MockAnalyzer(random())).setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
@ -168,16 +168,16 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
}
public void testRandom() throws IOException, InterruptedException {
final int numThreads = 1 + random.nextInt(8);
final int numThreads = 1 + random().nextInt(8);
final int numDocumentsToIndex = 50 + atLeast(70);
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
iwc.setFlushPolicy(flushPolicy);
final int numDWPT = 1 + random.nextInt(8);
final int numDWPT = 1 + random().nextInt(8);
DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
numDWPT);
iwc.setIndexerThreadPool(threadPool);
@ -230,15 +230,15 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
public void testStallControl() throws InterruptedException,
CorruptIndexException, LockObtainFailedException, IOException {
int[] numThreads = new int[] { 4 + random.nextInt(8), 1 };
final int numDocumentsToIndex = 50 + random.nextInt(50);
int[] numThreads = new int[] { 4 + random().nextInt(8), 1 };
final int numDocumentsToIndex = 50 + random().nextInt(50);
for (int i = 0; i < numThreads.length; i++) {
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
MockDirectoryWrapper dir = newDirectory();
// mock a very slow harddisk sometimes here so that flushing is very slow
dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
iwc.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
FlushPolicy flushPolicy = new FlushByRamOrCountsPolicy();
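
The config above disables count- and delete-term-based auto flushing (DISABLE_AUTO_FLUSH) so that RAM accounting alone drives flushes. A hedged sketch of a flush-by-RAM-or-count decision — an illustration of the idea, not Lucene's FlushPolicy API:

public class FlushDecisionSketch {
  // a threshold of -1 means "disabled", mirroring IndexWriterConfig.DISABLE_AUTO_FLUSH
  static boolean shouldFlush(int bufferedDocs, long bufferedBytes,
                             int maxBufferedDocs, long maxRamBytes) {
    boolean byCount = maxBufferedDocs != -1 && bufferedDocs >= maxBufferedDocs;
    boolean byRam = maxRamBytes != -1 && bufferedBytes >= maxRamBytes;
    return byCount || byRam;
  }

  public static void main(String[] args) {
    System.out.println(shouldFlush(10, 1 << 20, -1, 16 << 20));  // false: count disabled, RAM under limit
    System.out.println(shouldFlush(10, 32 << 20, -1, 16 << 20)); // true: RAM threshold crossed
  }
}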

View File

@ -39,13 +39,13 @@ public class TestForTooMuchCloning extends LuceneTestCase {
final MockDirectoryWrapper dir = newDirectory();
final TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setMaxMergeAtOnce(2);
final RandomIndexWriter w = new RandomIndexWriter(random, dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(tmp));
final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(tmp));
final int numDocs = 20;
for(int docs=0;docs<numDocs;docs++) {
StringBuilder sb = new StringBuilder();
for(int terms=0;terms<100;terms++) {
sb.append(_TestUtil.randomRealisticUnicodeString(random));
sb.append(_TestUtil.randomRealisticUnicodeString(random()));
sb.append(' ');
}
final Document doc = new Document();

View File

@ -54,12 +54,12 @@ public class TestForceMergeForever extends LuceneTestCase {
public void test() throws Exception {
final Directory d = newDirectory();
final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// Try to make an index that requires merging:
w.getConfig().setMaxBufferedDocs(_TestUtil.nextInt(random, 2, 11));
w.getConfig().setMaxBufferedDocs(_TestUtil.nextInt(random(), 2, 11));
final int numStartDocs = atLeast(20);
final LineFileDocs docs = new LineFileDocs(random, defaultCodecSupportsDocValues());
final LineFileDocs docs = new LineFileDocs(random(), defaultCodecSupportsDocValues());
for(int docIDX=0;docIDX<numStartDocs;docIDX++) {
w.addDocument(docs.nextDoc());
}
@ -83,7 +83,7 @@ public class TestForceMergeForever extends LuceneTestCase {
public void run() {
try {
while (!doStop.get()) {
w.updateDocument(new Term("docid", "" + random.nextInt(numStartDocs)),
w.updateDocument(new Term("docid", "" + random().nextInt(numStartDocs)),
docs.nextDoc());
// Force deletes to apply
w.getReader().close();

View File

@ -52,7 +52,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(mergePolicy)
);
@ -70,7 +70,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// Delete one doc so we get a .del file:
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
);
Term searchTerm = new Term("id", "7");
@ -123,7 +123,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// Open & close a writer: it should delete the above 4
// files and nothing more:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.close();
String[] files2 = dir.listAll();
@ -179,8 +179,8 @@ public class TestIndexFileDeleter extends LuceneTestCase {
}
public void copyFile(Directory dir, String src, String dest) throws IOException {
IndexInput in = dir.openInput(src, newIOContext(random));
IndexOutput out = dir.createOutput(dest, newIOContext(random));
IndexInput in = dir.openInput(src, newIOContext(random()));
IndexOutput out = dir.createOutput(dest, newIOContext(random()));
byte[] b = new byte[1024];
long remainder = in.length();
while(remainder > 0) {

View File

@ -29,6 +29,7 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Random;
public class TestIndexInput extends LuceneTestCase {
@ -85,6 +86,7 @@ public class TestIndexInput extends LuceneTestCase {
@BeforeClass
public static void beforeClass() throws IOException {
Random random = random();
INTS = new int[COUNT];
LONGS = new long[COUNT];
RANDOM_TEST_BYTES = new byte[COUNT * (5 + 4 + 9 + 8)];
@ -177,6 +179,7 @@ public class TestIndexInput extends LuceneTestCase {
// this test checks the raw IndexInput methods as it uses RAMIndexInput which extends IndexInput directly
public void testRawIndexInputRead() throws IOException {
Random random = random();
final RAMDirectory dir = new RAMDirectory();
IndexOutput os = dir.createOutput("foo", newIOContext(random));
os.writeBytes(READ_TEST_BYTES, READ_TEST_BYTES.length);

View File

@ -79,7 +79,7 @@ public class TestIndexWriter extends LuceneTestCase {
try {
IndexWriterConfig.setDefaultWriteLockTimeout(2000);
assertEquals(2000, IndexWriterConfig.getDefaultWriteLockTimeout());
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
} finally {
IndexWriterConfig.setDefaultWriteLockTimeout(savedWriteLockTimeout);
}
@ -92,7 +92,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.close();
// delete 40 documents
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
for (i = 0; i < 40; i++) {
writer.deleteDocuments(new Term("id", ""+i));
}
@ -103,7 +103,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();
// merge the index down and check that the new doc count is correct
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
assertEquals(60, writer.numDocs());
writer.forceMerge(1);
assertEquals(60, writer.maxDoc());
@ -118,7 +118,7 @@ public class TestIndexWriter extends LuceneTestCase {
// make sure opening a new index for create over
// this existing one works correctly:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
assertEquals(0, writer.maxDoc());
assertEquals(0, writer.numDocs());
writer.close();
@ -144,7 +144,7 @@ public class TestIndexWriter extends LuceneTestCase {
public static void assertNoUnreferencedFiles(Directory dir, String message) throws IOException {
String[] startFiles = dir.listAll();
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).rollback();
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))).rollback();
String[] endFiles = dir.listAll();
Arrays.sort(startFiles);
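
assertNoUnreferencedFiles works by snapshotting the directory listing, rolling back a fresh writer, and comparing sorted before/after listings. The comparison step in isolation (listings here are hard-coded stand-ins for dir.listAll()):

import java.util.Arrays;

public class SnapshotCompareSketch {
  static void assertSameFiles(String[] before, String[] after) {
    Arrays.sort(before); // sort both snapshots so order of listing does not matter
    Arrays.sort(after);
    if (!Arrays.equals(before, after)) {
      throw new AssertionError("unreferenced files: before=" + Arrays.toString(before)
          + " after=" + Arrays.toString(after));
    }
  }

  public static void main(String[] args) {
    assertSameFiles(new String[] {"_0.cfs", "segments_1"},
                    new String[] {"segments_1", "_0.cfs"}); // same files, different order: passes
  }
}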
@ -173,7 +173,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
// add one document & close writer
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDoc(writer);
writer.close();
@ -182,7 +182,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("should be one document", reader.numDocs(), 1);
// now open index for create:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
assertEquals("should be zero documents", writer.maxDoc(), 0);
addDoc(writer);
writer.close();
@ -201,7 +201,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDoc(writer);
// close
@ -219,7 +219,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexNoDocuments() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.commit();
writer.close();
@ -228,7 +228,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(0, reader.numDocs());
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.commit();
writer.close();
@ -241,7 +241,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testManyFields() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
for(int j=0;j<100;j++) {
Document doc = new Document();
doc.add(newField("a"+j, "aaa" + j, storedTextType));
@ -273,7 +273,7 @@ public class TestIndexWriter extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setRAMBufferSizeMB(0.000001).
setMergePolicy(newLogMergePolicy(10))
);
@ -296,7 +296,7 @@ public class TestIndexWriter extends LuceneTestCase {
// maxBufferedDocs in a write session
public void testChangingRAMBuffer() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.getConfig().setMaxBufferedDocs(10);
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -350,7 +350,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testChangingRAMBuffer2() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.getConfig().setMaxBufferedDocs(10);
writer.getConfig().setMaxBufferedDeleteTerms(10);
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -411,7 +411,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDiverseDocs() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.5));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.5));
int n = atLeast(1);
for(int i=0;i<n;i++) {
// First, docs where every term is unique (heavy on
@ -419,7 +419,7 @@ public class TestIndexWriter extends LuceneTestCase {
for(int j=0;j<100;j++) {
Document doc = new Document();
for(int k=0;k<100;k++) {
doc.add(newField("field", Integer.toString(random.nextInt()), storedTextType));
doc.add(newField("field", Integer.toString(random().nextInt()), storedTextType));
}
writer.addDocument(doc);
}
@ -460,7 +460,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEnablingNorms() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, pre flush
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setOmitNorms(true);
@ -486,7 +486,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(10, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, post flush
for(int j=0;j<27;j++) {
@ -517,7 +517,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testHighFreqTerm() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01));
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024*1024);
for(int i=0;i<4096;i++) {
@ -540,7 +540,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(1, reader.numDocs());
Term t = new Term("field", "a");
assertEquals(1, reader.docFreq(t));
DocsEnum td = _TestUtil.docs(random, reader,
DocsEnum td = _TestUtil.docs(random(), reader,
"field",
new BytesRef("a"),
MultiFields.getLiveDocs(reader),
@ -562,7 +562,7 @@ public class TestIndexWriter extends LuceneTestCase {
final class MyRAMDirectory extends MockDirectoryWrapper {
private LockFactory myLockFactory;
MyRAMDirectory(Directory delegate) {
super(random, delegate);
super(random(), delegate);
lockFactory = null;
myLockFactory = new SingleInstanceLockFactory();
}
@ -574,7 +574,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = new MyRAMDirectory(new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 100; i++) {
addDoc(writer);
}
@ -586,7 +586,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("did not get right number of hits", 100, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
writer.close();
dir.close();
@ -596,7 +596,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
);
@ -622,7 +622,7 @@ public class TestIndexWriter extends LuceneTestCase {
// empty doc (no norms) and flush
public void testEmptyDocAfterFlushingRealDoc() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
@ -654,7 +654,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testBadSegment() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document document = new Document();
FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
@ -671,7 +671,7 @@ public class TestIndexWriter extends LuceneTestCase {
try {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
IndexWriter iw = new IndexWriter(dir, conf);
@ -695,7 +695,7 @@ public class TestIndexWriter extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: iter=" + i);
}
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
//LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
//lmp.setMergeFactor(2);
//lmp.setUseCompoundFile(false);
@ -724,7 +724,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.close();
if (0 == i % 4) {
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
//LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
//lmp2.setUseCompoundFile(false);
writer.forceMerge(1);
@ -738,7 +738,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testUnlimitedMaxFieldLength() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
StringBuilder b = new StringBuilder();
@ -761,7 +761,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1179
public void testEmptyFieldName() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("", "a b c", TextField.TYPE_UNSTORED));
writer.addDocument(doc);
@ -771,7 +771,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEmptyFieldNameTerms() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("", "a b c", TextField.TYPE_UNSTORED));
writer.addDocument(doc);
@ -789,7 +789,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEmptyFieldNameWithEmptyTerm() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("", "", StringField.TYPE_UNSTORED));
doc.add(newField("", "a", StringField.TYPE_UNSTORED));
@ -835,7 +835,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1222
public void testDoBeforeAfterFlush() throws IOException {
Directory dir = newDirectory();
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
doc.add(newField("field", "a field", customType));
@ -879,7 +879,7 @@ public class TestIndexWriter extends LuceneTestCase {
};
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new TextField("field", tokens));
w.addDocument(doc);
@ -918,7 +918,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1219
public void testBinaryFieldOffsetLength() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
byte[] b = new byte[50];
for(int i=0;i<50;i++)
b[i] = (byte) (i+77);
@ -948,7 +948,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-2529
public void testPositionIncrementGapEmptyField() throws Exception {
Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random);
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setPositionIncrementGap( 100 );
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer));
@ -993,11 +993,11 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
try {
// Create my own random file:
IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random));
IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random()));
out.writeByte((byte) 42);
out.close();
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))).close();
assertTrue(dir.fileExists("myrandomfile"));
} finally {
@ -1007,7 +1007,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDeadlock() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -1023,7 +1023,7 @@ public class TestIndexWriter extends LuceneTestCase {
// index has 2 segments
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer2.addDocument(doc);
writer2.close();
@ -1050,7 +1050,7 @@ public class TestIndexWriter extends LuceneTestCase {
@Override
public void run() {
// LUCENE-2239: won't work with NIOFS/MMAP
Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter w = null;
while(!finish) {
try {
@ -1061,7 +1061,7 @@ public class TestIndexWriter extends LuceneTestCase {
w = null;
}
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2);
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2);
w = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1173,7 +1173,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexStoreCombos() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
byte[] b = new byte[50];
for(int i=0;i<50;i++)
b[i] = (byte) (i+77);
@ -1229,12 +1229,12 @@ public class TestIndexWriter extends LuceneTestCase {
// test that the terms were indexed.
assertTrue(_TestUtil.docs(random, ir, "binary", new BytesRef("doc1field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random, ir, "binary", new BytesRef("doc2field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random, ir, "binary", new BytesRef("doc3field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random, ir, "string", new BytesRef("doc1field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random, ir, "string", new BytesRef("doc2field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random, ir, "string", new BytesRef("doc3field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc1field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc2field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "binary", new BytesRef("doc3field1"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc1field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc2field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue(_TestUtil.docs(random(), ir, "string", new BytesRef("doc3field2"), null, null, false).nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
ir.close();
dir.close();
@ -1244,7 +1244,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1727: make sure doc fields are stored in order
public void testStoredFieldsOrder() throws Throwable {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType();
@ -1279,7 +1279,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNoDocsIndex() throws Throwable {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
writer.addDocument(new Document());
writer.close();
@ -1289,7 +1289,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexDivisor() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
config.setTermIndexInterval(2);
IndexWriter w = new IndexWriter(dir, config);
StringBuilder s = new StringBuilder();
@ -1306,7 +1306,7 @@ public class TestIndexWriter extends LuceneTestCase {
TermsEnum t = r.fields().terms("field").iterator(null);
int count = 0;
while(t.next() != null) {
final DocsEnum docs = _TestUtil.docs(random, t, null, null, false);
final DocsEnum docs = _TestUtil.docs(random(), t, null, null, false);
assertEquals(0, docs.nextDoc());
assertEquals(DocIdSetIterator.NO_MORE_DOCS, docs.nextDoc());
count++;
@ -1326,7 +1326,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(mergePolicy)
);
Document doc = new Document();
@ -1394,7 +1394,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(sdp));
// First commit
@ -1434,7 +1434,7 @@ public class TestIndexWriter extends LuceneTestCase {
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
// when listAll() was called in IndexFileDeleter.
Directory dir = newFSDirectory(_TestUtil.getTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))).close();
dir.close();
}
@ -1446,7 +1446,7 @@ public class TestIndexWriter extends LuceneTestCase {
// files are left in the Directory.
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
String[] files = dir.listAll();
@ -1493,7 +1493,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -1504,7 +1504,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
w.addDocument(doc);
IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)
.setOpenMode(OpenMode.CREATE));
w2.close();
@ -1516,7 +1516,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNoUnwantedTVFiles() throws Exception {
Directory dir = newDirectory();
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);
String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
@ -1609,7 +1609,7 @@ public class TestIndexWriter extends LuceneTestCase {
*/
public void testWickedLongTerm() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, dir, new StringSplitAnalyzer());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, new StringSplitAnalyzer());
char[] chars = new char[DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8];
Arrays.fill(chars, 'x');
@ -1659,7 +1659,7 @@ public class TestIndexWriter extends LuceneTestCase {
Field contentField = new Field("content", "", customType);
doc.add(contentField);
w = new RandomIndexWriter(random, dir);
w = new RandomIndexWriter(random(), dir);
contentField.setStringValue("other");
w.addDocument(doc);
@ -1677,7 +1677,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.close();
assertEquals(1, reader.docFreq(new Term("content", bigTerm)));
FieldCache.DocTermsIndex dti = FieldCache.DEFAULT.getTermsIndex(SlowCompositeReaderWrapper.wrap(reader), "content", random.nextBoolean());
FieldCache.DocTermsIndex dti = FieldCache.DEFAULT.getTermsIndex(SlowCompositeReaderWrapper.wrap(reader), "content", random().nextBoolean());
assertEquals(5, dti.numOrd()); // +1 for null ord
assertEquals(4, dti.size());
assertEquals(bigTermBytesRef, dti.lookup(3, new BytesRef()));
@ -1688,7 +1688,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-3183
public void testEmptyFieldNameTIIOne() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setTermIndexInterval(1);
iwc.setReaderTermsIndexDivisor(1);
IndexWriter writer = new IndexWriter(dir, iwc);
@ -1701,8 +1701,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDeleteAllNRTLeftoverFiles() throws Exception {
Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
for(int i = 0; i < 20; i++) {
for(int j = 0; j < 100; ++j) {
@ -1724,8 +1724,8 @@ public class TestIndexWriter extends LuceneTestCase {
}
public void testNRTReaderVersion() throws Exception {
Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("id", "0", StringField.TYPE_STORED));
w.addDocument(doc);
@ -1754,10 +1754,10 @@ public class TestIndexWriter extends LuceneTestCase {
// somehow "knows" a lock is held against write.lock
// even if you remove that file:
d.setLockFactory(new SimpleFSLockFactory());
RandomIndexWriter w1 = new RandomIndexWriter(random, d);
RandomIndexWriter w1 = new RandomIndexWriter(random(), d);
w1.deleteAll();
try {
new RandomIndexWriter(random, d, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setWriteLockTimeout(100));
new RandomIndexWriter(random(), d, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setWriteLockTimeout(100));
fail("should not be able to create another writer");
} catch (LockObtainFailedException lofe) {
// expected
@ -1769,7 +1769,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testChangeIndexOptions() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
FieldType docsAndFreqs = new FieldType(TextField.TYPE_UNSTORED);
docsAndFreqs.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
@ -1792,7 +1792,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testOnlyUpdateDocuments() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
final List<Document> docs = new ArrayList<Document>();
docs.add(new Document());
@ -1806,7 +1806,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenClose() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.prepareCommit();
try {
@ -1827,7 +1827,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenRollback() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.prepareCommit();
w.rollback();
@ -1839,7 +1839,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenRollback2() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.commit();
w.addDocument(new Document());
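
All of the hunks above apply one mechanical change: reads of the shared LuceneTestCase "random" field become calls to the per-thread random() accessor. A minimal sketch of the resulting contract, assuming this branch's test-framework; the class and test names are illustrative, not part of this commit:

import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;

public class SketchRandomContractTest extends LuceneTestCase {
  // Do not cache the Random in a field: the runner hands each test (and each
  // thread) its own seed-reproducible Random, and caching one across tests or
  // threads violates the sharing contract this migration enforces.
  public void testUsesPerThreadRandom() {
    Random r = random();                        // fetch inside the test method
    int docCount = _TestUtil.nextInt(r, 1, 20); // derive all randomness from r
    assertTrue(docCount >= 1 && docCount <= 20);
  }
}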

View File

@ -44,7 +44,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
*/
public void testCommitOnClose() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
@ -59,7 +59,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = IndexReader.open(dir);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for(int i=0;i<3;i++) {
for(int j=0;j<11;j++) {
TestIndexWriter.addDoc(writer);
@ -95,7 +95,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
*/
public void testCommitOnCloseAbort() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
@ -108,7 +108,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals("first number of hits", 14, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
for(int j=0;j<17;j++) {
TestIndexWriter.addDoc(writer);
@ -135,7 +135,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// Now make sure we can re-open the index, add docs,
// and all is good:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
// On abort, writer in fact may write to the same
@ -182,7 +182,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assumeFalse("This test cannot run with Memory codec", idFormat.equals("Memory") || contentFormat.equals("Memory"));
MockDirectoryWrapper dir = newDirectory();
Analyzer analyzer;
if (random.nextBoolean()) {
if (random().nextBoolean()) {
// no payloads
analyzer = new Analyzer() {
@Override
@ -192,12 +192,12 @@ public class TestIndexWriterCommit extends LuceneTestCase {
};
} else {
// fixed length payloads
final int length = random.nextInt(200);
final int length = random().nextInt(200);
analyzer = new Analyzer() {
@Override
public TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
return new TokenStreamComponents(tokenizer, new MockFixedLengthPayloadFilter(random, tokenizer, length));
return new TokenStreamComponents(tokenizer, new MockFixedLengthPayloadFilter(random(), tokenizer, length));
}
};
}
@ -266,7 +266,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
dir.setPreventDoubleWrite(false);
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(10))
);
@ -275,7 +275,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
// Open a reader before closing (committing) the writer:
@ -300,7 +300,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: do real full merge");
}
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.close();
@ -325,8 +325,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
final int NUM_THREADS = 5;
final double RUN_SEC = 0.5;
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
_TestUtil.reduceOpenFiles(w.w);
w.commit();
final AtomicBoolean failed = new AtomicBoolean();
@ -381,7 +381,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
);
@ -418,7 +418,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
public void testFutureCommit() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
Document doc = new Document();
w.addDocument(doc);
@ -444,7 +444,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertNotNull(commit);
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
assertEquals(1, w.numDocs());
@ -473,7 +473,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// changed since LUCENE-2386, where before IW would always commit on a fresh
// new index.
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
try {
DirectoryReader.listCommits(dir);
fail("listCommits should have thrown an exception over empty index");
@ -492,7 +492,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
);
@ -549,7 +549,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
);
@ -575,7 +575,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader.close();
reader2.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 17; i++)
TestIndexWriter.addDoc(writer);
@ -601,7 +601,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
public void testPrepareCommitNoChanges() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer.prepareCommit();
writer.commit();
writer.close();
@ -615,7 +615,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// LUCENE-1382
public void testCommitUserData() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
for(int j=0;j<17;j++)
TestIndexWriter.addDoc(w);
w.close();
@ -625,7 +625,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals(0, r.getIndexCommit().getUserData().size());
r.close();
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
for(int j=0;j<17;j++)
TestIndexWriter.addDoc(w);
Map<String,String> data = new HashMap<String,String>();
@ -637,7 +637,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals("test1", r.getIndexCommit().getUserData().get("label"));
r.close();
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.forceMerge(1);
w.close();
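
Several TestIndexWriterCommit cases above exercise IndexWriter's two-phase commit. A minimal sketch of that sequence inside a LuceneTestCase test method, using the same helpers the hunks use:

Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w.addDocument(new Document());
w.prepareCommit();   // phase one: flush and sync; readers still see nothing
w.commit();          // phase two: publish the new segments file
w.close();
dir.close();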

View File

@ -51,7 +51,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testDefaults() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass());
assertNull(conf.getIndexCommit());
assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
@ -138,7 +138,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testToString() throws Exception {
String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).toString();
String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).toString();
for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
int modifiers = f.getModifiers();
if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
@ -155,7 +155,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testClone() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig clone = conf.clone();
// Clone is shallow since not all parameters are cloneable.
@ -167,7 +167,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testInvalidValues() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
// Test IndexDeletionPolicy
assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
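
As the testClone hunk notes, IndexWriterConfig.clone() is shallow. A short sketch of the practical consequence, hedged because exactly which members are shared is an implementation detail:

IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig clone = conf.clone();
clone.setMaxBufferedDocs(17);                         // scalar settings diverge per instance
assertSame(conf.getAnalyzer(), clone.getAnalyzer());  // object-valued members stay shared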

View File

@ -52,7 +52,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
FieldType custom1 = new FieldType();
custom1.setStored(true);
@ -91,7 +91,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@ -124,7 +124,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testMaxBufferedDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
writer.addDocument(new Document());
writer.deleteDocuments(new Term("foobar", "1"));
@ -143,7 +143,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
.setMaxBufferedDeleteTerms(4));
int id = 0;
int value = 100;
@ -181,7 +181,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testBothDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
.setMaxBufferedDeleteTerms(100));
int id = 0;
@ -215,7 +215,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testBatchDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -258,7 +258,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testDeleteAll() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -304,7 +304,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testDeleteAllRollback() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -341,7 +341,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testDeleteAllNRT() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -429,7 +429,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
MockDirectoryWrapper startDir = newDirectory();
// TODO: find the resource leak that only occurs sometimes here.
startDir.setNoDeleteOpenFile(false);
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 157; i++) {
Document d = new Document();
d.add(newField("id", Integer.toString(i), StringField.TYPE_STORED));
@ -450,11 +450,11 @@ public class TestIndexWriterDelete extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: cycle");
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
dir.setPreventDoubleWrite(false);
IndexWriter modifier = new IndexWriter(dir,
newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(1000)
.setMaxBufferedDeleteTerms(1000)
.setMergeScheduler(new ConcurrentMergeScheduler()));
@ -691,7 +691,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();
lmp.setUseCompoundFile(true);
@ -815,7 +815,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockDirectoryWrapper dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
modifier.commit();
dir.failOn(failure.reset());
@ -845,7 +845,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testDeleteNullQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 5; i++) {
addDoc(modifier, i, 2*i);
@ -860,23 +860,23 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testDeleteAllSlowly() throws Exception {
final Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random, dir);
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int NUM_DOCS = atLeast(1000);
final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
for(int id=0;id<NUM_DOCS;id++) {
ids.add(id);
}
Collections.shuffle(ids, random);
Collections.shuffle(ids, random());
for(int id : ids) {
Document doc = new Document();
doc.add(newField("id", ""+id, StringField.TYPE_UNSTORED));
w.addDocument(doc);
}
Collections.shuffle(ids, random);
Collections.shuffle(ids, random());
int upto = 0;
while(upto < ids.size()) {
final int left = ids.size() - upto;
final int inc = Math.min(left, _TestUtil.nextInt(random, 1, 20));
final int inc = Math.min(left, _TestUtil.nextInt(random(), 1, 20));
final int limit = upto + inc;
while(upto < limit) {
w.deleteDocuments(new Term("id", ""+ids.get(upto++)));
@ -894,7 +894,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
final String fieldFormat = _TestUtil.getPostingsFormat("field");
assumeFalse("This test cannot run with Memory codec", fieldFormat.equals("Memory"));
assumeFalse("This test cannot run with SimpleText codec", fieldFormat.equals("SimpleText"));
final Random r = random;
final Random r = random();
Directory dir = newDirectory();
// note this test explicitly disables payloads
final Analyzer analyzer = new Analyzer() {
@ -944,7 +944,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// ever call commit() for this test:
// note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999)
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
int count = 0;
while(true) {
@ -990,7 +990,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// ever call commit() for this test:
final int flushAtDelCount = atLeast(1020);
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
int count = 0;
while(true) {
@ -1031,7 +1031,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
final AtomicBoolean closing = new AtomicBoolean();
final AtomicBoolean sawAfterFlush = new AtomicBoolean();
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false)) {
@Override
public void doAfterFlush() {
@ -1044,7 +1044,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
while(true) {
StringBuilder sb = new StringBuilder();
for(int termIDX=0;termIDX<100;termIDX++) {
sb.append(' ').append(_TestUtil.randomRealisticUnicodeString(random));
sb.append(' ').append(_TestUtil.randomRealisticUnicodeString(random()));
}
if (id == 500) {
w.deleteDocuments(new Term("id", "0"));
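
The delete tests above mostly steer flushing through setMaxBufferedDeleteTerms. A minimal sketch of that knob inside a LuceneTestCase test method; the field name and values are illustrative:

Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
    TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
    .setMaxBufferedDeleteTerms(1));          // flush once a single delete term is buffered
Document doc = new Document();
doc.add(newField("id", "7", StringField.TYPE_STORED));
w.addDocument(doc);
w.deleteDocuments(new Term("id", "7"));      // buffered, then flushed per the setting above
w.commit();
w.close();
dir.close();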

View File

@ -116,7 +116,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer;
final Random r = new Random(random.nextLong());
final Random r = new Random(random().nextLong());
volatile Throwable failure;
public IndexerThread(int i, IndexWriter writer) {
@ -198,7 +198,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();
private class MockIndexWriter extends IndexWriter {
Random r = new Random(random.nextLong());
Random r = new Random(random().nextLong());
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
@ -223,7 +223,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
MockDirectoryWrapper dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random);
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
@ -266,7 +266,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testRandomExceptionsThreads() throws Throwable {
MockDirectoryWrapper dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random);
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
@ -354,7 +354,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testExceptionDocumentsWriterInit() throws IOException {
Directory dir = newDirectory();
MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a field", TextField.TYPE_STORED));
w.addDocument(doc);
@ -372,7 +372,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1208
public void testExceptionJustBeforeFlush() throws IOException {
Directory dir = newDirectory();
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(newField("field", "a field", TextField.TYPE_STORED));
w.addDocument(doc);
@ -422,7 +422,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1210
public void testExceptionOnMergeInit() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
@ -501,7 +501,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// Make sure the doc that hit the exception was marked
// as deleted:
DocsEnum tdocs = _TestUtil.docs(random, reader,
DocsEnum tdocs = _TestUtil.docs(random(), reader,
t.field(),
new BytesRef(t.text()),
MultiFields.getLiveDocs(reader),
@ -561,7 +561,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
failure.setDoFail();
dir.failOn(failure);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
doc.add(newField("content", contents, TextField.TYPE_UNSTORED));
@ -698,7 +698,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(-1)
.setMergePolicy(
random.nextBoolean() ? NoMergePolicy.COMPOUND_FILES
random().nextBoolean() ? NoMergePolicy.COMPOUND_FILES
: NoMergePolicy.NO_COMPOUND_FILES));
// don't use a merge policy here; they depend on the DWPThreadPool and its max thread states etc.
final int finalI = i;
@ -824,7 +824,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(5))
@ -907,7 +907,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
dir.setFailOnCreateOutput(false);
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a field", TextField.TYPE_STORED));
w.addDocument(doc);
@ -929,7 +929,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testForceMergeExceptions() throws IOException {
Directory startDir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
IndexWriter w = new IndexWriter(startDir, conf);
for(int i=0;i<27;i++)
@ -941,8 +941,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: iter " + i);
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler());
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler());
((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
w = new IndexWriter(dir, conf);
dir.setRandomIOExceptionRate(0.5);
@ -965,7 +965,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
final AtomicBoolean thrown = new AtomicBoolean(false);
final Directory dir = newDirectory();
final IndexWriter writer = new IndexWriter(dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new InfoStream() {
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setInfoStream(new InfoStream() {
@Override
public void message(String component, final String message) {
if (message.startsWith("now flush at close") && thrown.compareAndSet(false, true)) {
@ -1013,7 +1013,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1347
public void testRollbackExceptionHang() throws Throwable {
Directory dir = newDirectory();
MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
addDoc(w);
w.doFail = true;
@ -1035,7 +1035,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1049,8 +1049,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
final String segmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(dir);
IndexInput in = dir.openInput(segmentsFileName, newIOContext(random));
IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen), newIOContext(random));
IndexInput in = dir.openInput(segmentsFileName, newIOContext(random()));
IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen), newIOContext(random()));
out.copyBytes(in, in.length()-1);
byte b = in.readByte();
out.writeByte((byte) (1+b));
@ -1077,7 +1077,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1094,8 +1094,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
"",
1+gen);
IndexInput in = dir.openInput(fileNameIn, newIOContext(random));
IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random));
IndexInput in = dir.openInput(fileNameIn, newIOContext(random()));
IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random()));
long length = in.length();
for(int i=0;i<length-1;i++) {
out.writeByte(in.readByte());
@ -1126,7 +1126,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(true))
);
((LogMergePolicy) writer.getConfig().getMergePolicy()).setNoCFSRatio(1.0);
@ -1175,7 +1175,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1195,8 +1195,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
"",
1+gen);
IndexInput in = dir.openInput(fileNameIn, newIOContext(random));
IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random));
IndexInput in = dir.openInput(fileNameIn, newIOContext(random()));
IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random()));
long length = in.length();
for(int i=0;i<length-1;i++) {
out.writeByte(in.readByte());
@ -1213,7 +1213,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
reader.close();
try {
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
} catch (Exception e) {
e.printStackTrace(System.out);
fail("writer failed to open on a crashed index");
@ -1238,12 +1238,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for (FailOnTermVectors failure : failures) {
MockDirectoryWrapper dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
dir.failOn(failure);
int numDocs = 10 + random.nextInt(30);
int numDocs = 10 + random().nextInt(30);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
Field field = newField(random, "field", "a field", TextField.TYPE_STORED);
Field field = newField(random(), "field", "a field", TextField.TYPE_STORED);
doc.add(field);
// random TV
try {
@ -1252,7 +1252,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
} catch (RuntimeException e) {
assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
}
if (random.nextInt(20) == 0) {
if (random().nextInt(20) == 0) {
w.commit();
_TestUtil.checkIndex(dir);
}
@ -1264,7 +1264,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
Field field = newField(random, "field", "a field", TextField.TYPE_STORED);
Field field = newField(random(), "field", "a field", TextField.TYPE_STORED);
doc.add(field);
// random TV
try {
@ -1273,7 +1273,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
} catch (RuntimeException e) {
assertTrue(e.getMessage().startsWith(FailOnTermVectors.EXC_MSG));
}
if (random.nextInt(20) == 0) {
if (random().nextInt(20) == 0) {
w.commit();
_TestUtil.checkIndex(dir);
}
@ -1326,8 +1326,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testAddDocsNonAbortingException() throws Exception {
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random, dir);
final int numDocs1 = random.nextInt(25);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int numDocs1 = random().nextInt(25);
for(int docCount=0;docCount<numDocs1;docCount++) {
Document doc = new Document();
doc.add(newField("content", "good content", TextField.TYPE_UNSTORED));
@ -1357,7 +1357,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
}
final int numDocs2 = random.nextInt(25);
final int numDocs2 = random().nextInt(25);
for(int docCount=0;docCount<numDocs2;docCount++) {
Document doc = new Document();
doc.add(newField("content", "good content", TextField.TYPE_UNSTORED));
@ -1384,8 +1384,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testUpdateDocsNonAbortingException() throws Exception {
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random, dir);
final int numDocs1 = random.nextInt(25);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int numDocs1 = random().nextInt(25);
for(int docCount=0;docCount<numDocs1;docCount++) {
Document doc = new Document();
doc.add(newField("content", "good content", TextField.TYPE_UNSTORED));
@ -1394,7 +1394,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// Use addDocs (no exception) to get docs in the index:
final List<Document> docs = new ArrayList<Document>();
final int numDocs2 = random.nextInt(25);
final int numDocs2 = random().nextInt(25);
for(int docCount=0;docCount<numDocs2;docCount++) {
Document doc = new Document();
docs.add(doc);
@ -1404,7 +1404,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
w.addDocuments(docs);
final int numDocs3 = random.nextInt(25);
final int numDocs3 = random().nextInt(25);
for(int docCount=0;docCount<numDocs3;docCount++) {
Document doc = new Document();
doc.add(newField("content", "good content", TextField.TYPE_UNSTORED));
@ -1412,8 +1412,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
docs.clear();
final int limit = _TestUtil.nextInt(random, 2, 25);
final int crashAt = random.nextInt(limit);
final int limit = _TestUtil.nextInt(random(), 2, 25);
final int crashAt = random().nextInt(limit);
for(int docCount=0;docCount<limit;docCount++) {
Document doc = new Document();
docs.add(doc);
@ -1437,7 +1437,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertEquals(CRASH_FAIL_MESSAGE, ioe.getMessage());
}
final int numDocs4 = random.nextInt(25);
final int numDocs4 = random().nextInt(25);
for(int docCount=0;docCount<numDocs4;docCount++) {
Document doc = new Document();
doc.add(newField("content", "good content", TextField.TYPE_UNSTORED));
@ -1480,7 +1480,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testExceptionOnCtor() throws Exception {
UOEDirectory uoe = new UOEDirectory();
Directory d = new MockDirectoryWrapper(random, uoe);
Directory d = new MockDirectoryWrapper(random(), uoe);
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
iw.addDocument(new Document());
iw.close();
@ -1524,7 +1524,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Document doc = new Document();
Token t1 = new Token("foo", 0, 3);
t1.setPositionIncrement(Integer.MAX_VALUE-500);
if (random.nextBoolean()) {
if (random().nextBoolean()) {
t1.setPayload(new Payload(new byte[] { 0x1 } ));
}
TokenStream overflowingTokenStream = new CannedTokenStream(
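
Note how IndexerThread and MockIndexWriter above each derive a private Random with new Random(random().nextLong()): the per-test Random must not be shared with spawned threads, so every worker seeds its own from it on the test thread, keeping the whole run reproducible from one master seed. A sketch of the pattern, assuming the worker is constructed on the test thread as in the hunks above; the class name is illustrative:

class SketchWorker extends Thread {
  // The field initializer runs on the constructing (test) thread, so calling
  // random() here is legal; afterwards the worker only ever touches its own r.
  final Random r = new Random(random().nextLong());
  @Override
  public void run() {
    int iters = 10 + r.nextInt(30);
    // ... randomized work driven solely by r ...
  }
}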

View File

@ -36,12 +36,12 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
final Document doc = new Document();
doc.add(newField("content", "aaa", StringField.TYPE_UNSTORED));
final int incrMin = TEST_NIGHTLY ? 15 : 40;
for(int numDocs=10;numDocs<500;numDocs += _TestUtil.nextInt(random, incrMin, 5*incrMin)) {
for(int numDocs=10;numDocs<500;numDocs += _TestUtil.nextInt(random(), incrMin, 5*incrMin)) {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMinMergeDocs(1);
ldmp.setMergeFactor(5);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
ldmp));
for(int j=0;j<numDocs;j++)
@ -55,7 +55,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(5);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setMergePolicy(ldmp));
new MockAnalyzer(random())).setMergePolicy(ldmp));
writer.forceMerge(3);
writer.close();
@ -81,7 +81,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
ldmp.setMinMergeDocs(1);
ldmp.setMergeFactor(4);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
for(int iter=0;iter<10;iter++) {
@ -122,7 +122,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
public void testForceMergeTempSpaceUsage() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
if (VERBOSE) {
System.out.println("TEST: config1=" + writer.getConfig());
}
@ -155,7 +155,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
// Important to use same term index interval else a
// smaller one here could increase the disk usage and
// cause a false failure:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.close();
long maxDiskUsage = dir.getMaxUsedSizeInBytes();
@ -173,7 +173,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
for(int pass=0;pass<2;pass++) {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(51))
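
testForceMergeTempSpaceUsage above bounds the transient disk space a full merge may use. A minimal sketch of the measurement idea, relying on the MockDirectoryWrapper accounting the test already uses; the bound itself is illustrative:

MockDirectoryWrapper dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// ... add documents ...
w.forceMerge(1);                               // merge down to a single segment
w.close();
long maxUsed = dir.getMaxUsedSizeInBytes();    // peak usage, merge temp files included
// assert that maxUsed stays within a small multiple of the final index size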

View File

@ -76,10 +76,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
public void testIndexWriterLockRelease() throws IOException {
Directory dir = newFSDirectory(this.__test_dir);
try {
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e) {
try {
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e1) {
}
} finally {
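
This class covers releasing the write lock on failure; the related TestIndexWriter hunks above show the rule being enforced, namely that only one IndexWriter may hold a directory's write lock at a time. A sketch of that failure mode inside a LuceneTestCase test method:

Directory d = newDirectory();
IndexWriter first = new IndexWriter(d,
    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
try {
  new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
      .setWriteLockTimeout(100));              // give up after 100 ms, not the default wait
  fail("second writer on the same directory must not obtain the lock");
} catch (LockObtainFailedException expected) {
  // expected
}
first.close();
d.close();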

View File

@ -34,7 +34,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++) {
@ -51,7 +51,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
boolean noOverMerge = false;
@ -76,7 +76,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
mp.setMinMergeDocs(100);
mp.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp));
for (int i = 0; i < 100; i++) {
@ -86,7 +86,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
mp = new LogDocMergePolicy();
mp.setMergeFactor(10);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(mp));
mp.setMinMergeDocs(100);
checkInvariants(writer);
@ -102,7 +102,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy()).
setMergeScheduler(new SerialMergeScheduler())
@ -131,7 +131,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
.setMergeScheduler(new SerialMergeScheduler()));
@ -145,7 +145,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
.setMergeScheduler(new SerialMergeScheduler()));
}
@ -154,7 +154,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(10);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new SerialMergeScheduler()));
// merge policy only fixes segments on levels where merges
@ -183,7 +183,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(ldmp));
for (int i = 0; i < 250; i++) {
@ -195,7 +195,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
// delete some docs without merging
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
);
writer.deleteDocuments(new Term("content", "aaa"));
@ -204,7 +204,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(5);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
// merge factor is changed, so check invariants after all adds
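
These merge-policy tests keep re-opening the writer with retuned LogDocMergePolicy settings and then checking the level invariants. A short sketch of the two knobs being varied, with illustrative values and an already-open Directory assumed:

LogDocMergePolicy mp = new LogDocMergePolicy();
mp.setMinMergeDocs(100);   // segments below this doc count all sit on the lowest level
mp.setMergeFactor(10);     // merge once ten same-level segments accumulate
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
    TEST_VERSION_CURRENT, new MockAnalyzer(random()))
    .setMaxBufferedDocs(10).setMergePolicy(mp));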

View File

@ -44,14 +44,14 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory indexA = newDirectory();
Directory indexB = newDirectory();
fillIndex(random, indexA, 0, num);
fillIndex(random(), indexA, 0, num);
boolean fail = verifyIndex(indexA, 0);
if (fail)
{
fail("Index a is invalid");
}
fillIndex(random, indexB, num, num);
fillIndex(random(), indexB, num, num);
fail = verifyIndex(indexB, num);
if (fail)
{
@ -62,7 +62,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
IndexWriter writer = new IndexWriter(
merged,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
);
writer.addIndexes(indexA, indexB);
@ -123,7 +123,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
public void testForceMergeDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH));
Document document = new Document();
@ -154,7 +154,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(10, ir.numDocs());
ir.close();
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(dir, dontMergeConfig);
writer.deleteDocuments(new Term("id", "0"));
@ -165,7 +165,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(8, ir.numDocs());
ir.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
assertEquals(8, writer.numDocs());
assertEquals(10, writer.maxDoc());
writer.forceMergeDeletes();
@ -183,7 +183,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
setMergePolicy(newLogMergePolicy(50))
@ -217,7 +217,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(98, ir.numDocs());
ir.close();
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(dir, dontMergeConfig);
for(int i=0;i<98;i+=2) {
@ -231,7 +231,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(3))
);
assertEquals(49, writer.numDocs());
@ -250,7 +250,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
setMergePolicy(newLogMergePolicy(50))
@ -283,7 +283,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(98, ir.numDocs());
ir.close();
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(dir, dontMergeConfig);
for(int i=0;i<98;i+=2) {
@ -296,7 +296,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(3))
);
writer.forceMergeDeletes(false);
@ -335,7 +335,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
public void testSetMaxMergeDocs() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setMaxMergeDocs(20);
@ -369,7 +369,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
}
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).
TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy());
@ -442,7 +442,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
reader.close();
// Reopen
writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
}
writer.close();
}

View File

@ -40,13 +40,13 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
LockObtainFailedException, IOException, InterruptedException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random));
new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
ReaderHolder holder = new ReaderHolder();
ReaderThread[] threads = new ReaderThread[atLeast(3)];
final CountDownLatch latch = new CountDownLatch(1);
WriterThread writerThread = new WriterThread(holder, writer,
atLeast(500), random, latch);
atLeast(500), random(), latch);
for (int i = 0; i < threads.length; i++) {
threads[i] = new ReaderThread(holder, latch);
threads[i].start();
@ -74,7 +74,6 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
private final ReaderHolder holder;
private final IndexWriter writer;
private final int numOps;
private final Random random;
private boolean countdown = true;
private final CountDownLatch latch;
Throwable failed;
@ -85,12 +84,12 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
this.holder = holder;
this.writer = writer;
this.numOps = numOps;
this.random = random;
this.latch = latch;
}
public void run() {
DirectoryReader currentReader = null;
Random random = LuceneTestCase.random();
try {
Document doc = new Document();
doc.add(new Field("id", "1", TextField.TYPE_UNSTORED));
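The WriterThread hunks above are the commit's Random-sharing contract in miniature: the cached random field is deleted, and each thread obtains its own randomness, either by seeding a private Random on the test thread or by calling LuceneTestCase.random() from inside run(). A minimal sketch of both idioms as they recur throughout this diff; the class and method names are hypothetical, and it assumes the lucene test-framework (RandomizedRunner) is on the classpath:

import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;

public class TestRandomSharingSketch extends LuceneTestCase {
  public void testPerThreadRandoms() throws Exception {
    // Idiom 1: derive a privately owned Random from the test's master seed
    // on the test thread, then hand it to the worker (cf. TestNRTReaderWithThreads).
    final Random seeded = new Random(random().nextLong());
    Thread t1 = new Thread() {
      @Override
      public void run() {
        seeded.nextInt(5); // safe: no other thread shares this instance
      }
    };
    // Idiom 2: ask for the per-thread Random inside run() (cf. WriterThread above).
    Thread t2 = new Thread() {
      @Override
      public void run() {
        Random r = LuceneTestCase.random();
        r.nextInt(5);
      }
    };
    t1.start(); t2.start();
    t1.join(); t2.join(); // join before returning, so the thread-leak check stays quiet
  }
}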

View File

@ -53,14 +53,14 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
System.out.println("TEST: pass=" + pass);
}
boolean doAbort = pass == 1;
long diskFree = _TestUtil.nextInt(random, 100, 300);
long diskFree = _TestUtil.nextInt(random(), 100, 300);
while(true) {
if (VERBOSE) {
System.out.println("TEST: cycle: diskFree=" + diskFree);
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
dir.setMaxSizeInBytes(diskFree);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
MergeScheduler ms = writer.getConfig().getMergeScheduler();
if (ms instanceof ConcurrentMergeScheduler) {
// This test intentionally produces exceptions
@ -120,7 +120,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
dir.close();
// Now try again w/ more space:
diskFree += TEST_NIGHTLY ? _TestUtil.nextInt(random, 400, 600) : _TestUtil.nextInt(random, 3000, 5000);
diskFree += TEST_NIGHTLY ? _TestUtil.nextInt(random(), 400, 600) : _TestUtil.nextInt(random(), 3000, 5000);
} else {
//_TestUtil.syncConcurrentMerges(writer);
dir.setMaxSizeInBytes(0);
@ -168,7 +168,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
long inputDiskUsage = 0;
for(int i=0;i<NUM_DIR;i++) {
dirs[i] = newDirectory();
IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for(int j=0;j<25;j++) {
addDocWithIndex(writer, 25*i+j);
}
@ -182,7 +182,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Now, build a starting index that has START_COUNT docs. We
// will then try to addIndexes into a copy of this:
MockDirectoryWrapper startDir = newDirectory();
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for(int j=0;j<START_COUNT;j++) {
addDocWithIndex(writer, j);
}
@ -224,7 +224,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
System.out.println("TEST: iter=" + iter);
// Start with 100 bytes more than we are currently using:
long diskFree = diskUsage+_TestUtil.nextInt(random, 50, 200);
long diskFree = diskUsage+_TestUtil.nextInt(random(), 50, 200);
int method = iter;
@ -246,8 +246,8 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
}
// Make a new dir that will enforce disk usage:
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
IOException err = null;
MergeScheduler ms = writer.getConfig().getMergeScheduler();
@ -440,7 +440,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
dir.close();
// Try again with more free space:
diskFree += TEST_NIGHTLY ? _TestUtil.nextInt(random, 4000, 8000) : _TestUtil.nextInt(random, 40000, 80000);
diskFree += TEST_NIGHTLY ? _TestUtil.nextInt(random(), 4000, 8000) : _TestUtil.nextInt(random(), 40000, 80000);
}
}
@ -478,7 +478,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
//IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergeScheduler(new SerialMergeScheduler()).
setReaderPooling(true).
setMergePolicy(newLogMergePolicy(2))
@ -519,7 +519,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// OK:
public void testImmediateDiskFull() throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes()));
final Document doc = new Document();

View File

@ -64,7 +64,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
}
} else {
// we are the fork, setup a crashing thread
final int crashTime = _TestUtil.nextInt(random, 3000, 4000);
final int crashTime = _TestUtil.nextInt(random(), 3000, 4000);
Thread t = new Thread() {
@Override
public void run() {
@ -96,7 +96,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
// passing NIGHTLY to this test makes it run for much longer, easier to catch it in the act...
cmd.add("-Dtests.nightly=true");
cmd.add("-DtempDir=" + tempDir.getPath());
cmd.add("-Dtests.seed=" + random.nextLong() + ":" + random.nextLong());
cmd.add("-Dtests.seed=" + random().nextLong() + ":" + random().nextLong());
cmd.add("-ea");
cmd.add("-cp");
cmd.add(System.getProperty("java.class.path"));

View File

@ -51,7 +51,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public static int count(Term t, IndexReader r) throws IOException {
int count = 0;
DocsEnum td = _TestUtil.docs(random, r,
DocsEnum td = _TestUtil.docs(random(), r,
t.field(), new BytesRef(t.text()),
MultiFields.getLiveDocs(r),
null,
@ -68,26 +68,26 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testAddCloseOpen() throws IOException {
Directory dir1 = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir1, iwc);
for (int i = 0; i < 97 ; i++) {
DirectoryReader reader = writer.getReader();
if (i == 0) {
writer.addDocument(DocHelper.createDocument(i, "x", 1 + random.nextInt(5)));
writer.addDocument(DocHelper.createDocument(i, "x", 1 + random().nextInt(5)));
} else {
int previous = random.nextInt(i);
int previous = random().nextInt(i);
// a check if the reader is current here could fail since there might be
// merges going on.
switch (random.nextInt(5)) {
switch (random().nextInt(5)) {
case 0:
case 1:
case 2:
writer.addDocument(DocHelper.createDocument(i, "x", 1 + random.nextInt(5)));
writer.addDocument(DocHelper.createDocument(i, "x", 1 + random().nextInt(5)));
break;
case 3:
writer.updateDocument(new Term("id", "" + previous), DocHelper.createDocument(
previous, "x", 1 + random.nextInt(5)));
previous, "x", 1 + random().nextInt(5)));
break;
case 4:
writer.deleteDocuments(new Term("id", "" + previous));
@ -102,10 +102,10 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertTrue(reader.isCurrent());
writer.close();
assertTrue(reader.isCurrent()); // all changes are visible to the reader
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
writer = new IndexWriter(dir1, iwc);
assertTrue(reader.isCurrent());
writer.addDocument(DocHelper.createDocument(1, "x", 1+random.nextInt(5)));
writer.addDocument(DocHelper.createDocument(1, "x", 1+random().nextInt(5)));
assertTrue(reader.isCurrent()); // segments in ram but IW is different to the readers one
writer.close();
assertFalse(reader.isCurrent()); // segments written
@ -117,12 +117,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = true;
Directory dir1 = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
if (iwc.getMaxBufferedDocs() < 20) {
iwc.setMaxBufferedDocs(20);
}
// no merging
if (random.nextBoolean()) {
if (random().nextBoolean()) {
iwc.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
} else {
iwc.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
@ -167,7 +167,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertEquals(0, count(new Term("id", id10), r3));
assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a b c", TextField.TYPE_UNSTORED));
writer.addDocument(doc);
@ -187,7 +187,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testIsCurrent() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -195,7 +195,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
writer = new IndexWriter(dir, iwc);
doc = new Document();
doc.add(newField("field", "a b c", TextField.TYPE_UNSTORED));
@ -232,12 +232,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = false;
Directory dir1 = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
if (iwc.getMaxBufferedDocs() < 20) {
iwc.setMaxBufferedDocs(20);
}
// no merging
if (random.nextBoolean()) {
if (random().nextBoolean()) {
iwc.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
} else {
iwc.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
@ -250,7 +250,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// create a 2nd index
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
createIndexNoClose(!doFullMerge, "index2", writer2);
writer2.close();
@ -287,11 +287,11 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = false;
Directory dir1 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// create a 2nd index
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
createIndexNoClose(!doFullMerge, "index2", writer2);
writer2.close();
@ -319,7 +319,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = true;
Directory dir1 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderTermsIndexDivisor(2));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setReaderTermsIndexDivisor(2));
// create the index
createIndexNoClose(!doFullMerge, "index1", writer);
writer.flush(false, true);
@ -356,7 +356,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// reopen the writer to verify the delete made it to the directory
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader w2r1 = writer.getReader();
assertEquals(0, count(new Term("id", id10), w2r1));
w2r1.close();
@ -369,7 +369,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
int numDirs = 3;
Directory mainDir = newDirectory();
IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
_TestUtil.reduceOpenFiles(mainWriter);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
@ -412,7 +412,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
this.numDirs = numDirs;
this.mainWriter = mainWriter;
addDir = newDirectory();
IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++) {
Document doc = DocHelper.createDocument(i, "addindex", 4);
writer.addDocument(doc);
@ -463,7 +463,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
try {
final Directory[] dirs = new Directory[numDirs];
for (int k = 0; k < numDirs; k++)
dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(addDir, newIOContext(random)));
dirs[k] = new MockDirectoryWrapper(random(), new RAMDirectory(addDir, newIOContext(random())));
//int j = 0;
//while (true) {
// System.out.println(Thread.currentThread().getName() + ": iter
@ -521,7 +521,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
*/
public void doTestIndexWriterReopenSegment(boolean doFullMerge) throws Exception {
Directory dir1 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
createIndexNoClose(false, "index1", writer);
@ -557,7 +557,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// test whether the changes made it to the directory
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader w2r1 = writer.getReader();
// ensure the deletes were actually flushed to the directory
assertEquals(200, w2r1.maxDoc());
@ -615,7 +615,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
MyWarmer warmer = new MyWarmer();
IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergedSegmentWarmer(warmer).
setMergeScheduler(new ConcurrentMergeScheduler()).
@ -650,7 +650,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testAfterCommit() throws Exception {
Directory dir1 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
writer.commit();
// create the index
@ -682,7 +682,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Make sure reader remains usable even if IndexWriter closes
public void testAfterClose() throws Exception {
Directory dir1 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
// create the index
createIndexNoClose(false, "test", writer);
@ -712,7 +712,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
MockDirectoryWrapper dir1 = newDirectory();
final IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
);
@ -722,7 +722,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final Directory[] dirs = new Directory[10];
for (int i=0;i<10;i++) {
dirs[i] = new MockDirectoryWrapper(random, new RAMDirectory(dir1, newIOContext(random)));
dirs[i] = new MockDirectoryWrapper(random(), new RAMDirectory(dir1, newIOContext(random())));
}
DirectoryReader r = writer.getReader();
@ -795,7 +795,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory dir1 = newDirectory();
final IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
);
@ -813,7 +813,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final Thread[] threads = new Thread[numThreads];
for(int i=0;i<numThreads;i++) {
threads[i] = new Thread() {
final Random r = new Random(random.nextLong());
final Random r = new Random(random().nextLong());
@Override
public void run() {
@ -875,7 +875,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testForceMergeDeletes() throws Throwable {
Directory dir = newDirectory();
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
doc.add(newField("field", "a b c", TextField.TYPE_UNSTORED));
Field id = newField("id", "", StringField.TYPE_UNSTORED);
@ -899,7 +899,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testDeletesNumDocs() throws Throwable {
Directory dir = newDirectory();
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a b c", TextField.TYPE_UNSTORED));
Field id = newField("id", "", StringField.TYPE_UNSTORED);
@ -929,7 +929,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testEmptyIndex() throws Exception {
// Ensures that getReader works on an empty index, which hasn't been committed yet.
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexReader r = w.getReader();
assertEquals(0, r.numDocs());
r.close();
@ -942,7 +942,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final AtomicBoolean didWarm = new AtomicBoolean();
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setReaderPooling(true).
setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
@ -975,7 +975,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
"Lucene3x".equals(Codec.getDefault().getName()));
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setReaderTermsIndexDivisor(-1);
new MockAnalyzer(random())).setReaderTermsIndexDivisor(-1);
// Don't proceed if picked Codec is in the list of illegal ones.
final String format = _TestUtil.getPostingsFormat("f");
@ -989,7 +989,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
w.addDocument(doc);
IndexReader r = IndexReader.open(w, true).getSequentialSubReaders()[0];
try {
_TestUtil.docs(random, r, "f", new BytesRef("val"), null, null, false);
_TestUtil.docs(random(), r, "f", new BytesRef("val"), null, null, false);
fail("should have failed to seek since terms index was not loaded.");
} catch (IllegalStateException e) {
// expected - we didn't load the term index
@ -1004,7 +1004,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
DirectoryReader r = w.getReader(); // start pooling readers

View File

@ -64,7 +64,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
};
private int nextInt(int lim) {
return random.nextInt(lim);
return random().nextInt(lim);
}
private int nextInt(int start, int end) {
@ -98,7 +98,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
else if (5 == t && i < len-1) {
// Illegal unpaired surrogate
if (nextInt(10) == 7) {
if (random.nextBoolean())
if (random().nextBoolean())
buffer[i] = (char) nextInt(0xd800, 0xdc00);
else
buffer[i] = (char) nextInt(0xdc00, 0xe000);
@ -235,7 +235,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
public void testEmbeddedFFFF() throws Throwable {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newField("field", "a a\uffffb", TextField.TYPE_UNSTORED));
w.addDocument(doc);
@ -274,7 +274,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
// Make sure terms, including ones with surrogate pairs,
// sort in codepoint sort order by default
public void testTermUTF16SortOrder() throws Throwable {
Random rnd = random;
Random rnd = random();
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(rnd, dir);
Document d = new Document();

View File

@ -126,7 +126,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
@ -170,7 +170,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
@ -211,7 +211,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
// Quick test to make sure index is not corrupt:
IndexReader reader = IndexReader.open(dir);
DocsEnum tdocs = _TestUtil.docs(random, reader,
DocsEnum tdocs = _TestUtil.docs(random(), reader,
"field",
new BytesRef("aaa"),
MultiFields.getLiveDocs(reader),
@ -242,7 +242,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
@ -297,7 +297,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
MockDirectoryWrapper dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
final Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -480,7 +480,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
Field field = newField("field", "testData", TextField.TYPE_STORED);
doc.add(field);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
iwConstructed.countDown();
startIndexing.await();
writer.addDocument(doc);

View File

@ -100,7 +100,7 @@ public class TestIndexableField extends LuceneTestCase {
@Override
public float boost() {
return 1.0f + random.nextFloat();
return 1.0f + random().nextFloat();
}
@Override
@ -157,7 +157,7 @@ public class TestIndexableField extends LuceneTestCase {
public void testArbitraryFields() throws Exception {
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random, dir);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final int NUM_DOCS = atLeast(27);
if (VERBOSE) {
@ -167,7 +167,7 @@ public class TestIndexableField extends LuceneTestCase {
int baseCount = 0;
for(int docCount=0;docCount<NUM_DOCS;docCount++) {
final int fieldCount = _TestUtil.nextInt(random, 1, 17);
final int fieldCount = _TestUtil.nextInt(random(), 1, 17);
fieldsPerDoc[docCount] = fieldCount-1;
final int finalDocCount = docCount;

View File

@ -38,7 +38,7 @@ public class TestIsCurrent extends LuceneTestCase {
// initialize directory
directory = newDirectory();
writer = new RandomIndexWriter(random, directory);
writer = new RandomIndexWriter(random(), directory);
// write document
Document doc = new Document();

View File

@ -50,7 +50,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
private class SeekCountingDirectory extends MockDirectoryWrapper {
public SeekCountingDirectory(Directory delegate) {
super(random, delegate);
super(random(), delegate);
}
@Override
@ -143,7 +143,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
public void testSeek() throws IOException {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(newField(this.field, "a b", TextField.TYPE_STORED));

View File

@ -43,9 +43,9 @@ public class TestLongPostings extends LuceneTestCase {
// Produces a realistic unicode random string that
// survives MockAnalyzer unchanged:
private String getRandomTerm(String other) throws IOException {
Analyzer a = new MockAnalyzer(random);
Analyzer a = new MockAnalyzer(random());
while(true) {
String s = _TestUtil.randomRealisticUnicodeString(random);
String s = _TestUtil.randomRealisticUnicodeString(random());
if (other != null && s.equals(other)) {
continue;
}
@ -80,7 +80,7 @@ public class TestLongPostings extends LuceneTestCase {
public void testLongPostings() throws Exception {
// Don't use _TestUtil.getTempDir so that we own the
// randomness (ie same seed will point to same dir):
Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random().nextLong()));
final int NUM_DOCS = atLeast(2000);
@ -105,24 +105,24 @@ public class TestLongPostings extends LuceneTestCase {
final FixedBitSet isS1 = new FixedBitSet(NUM_DOCS);
for(int idx=0;idx<NUM_DOCS;idx++) {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
isS1.set(idx);
}
}
final IndexReader r;
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
.setMergePolicy(newLogMergePolicy());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random.nextDouble());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random().nextDouble());
iwc.setMaxBufferedDocs(-1);
final RandomIndexWriter riw = new RandomIndexWriter(random, dir, iwc);
final RandomIndexWriter riw = new RandomIndexWriter(random(), dir, iwc);
for(int idx=0;idx<NUM_DOCS;idx++) {
final Document doc = new Document();
String s = isS1.get(idx) ? s1 : s2;
final Field f = newField("field", s, TextField.TYPE_UNSTORED);
final int count = _TestUtil.nextInt(random, 1, 4);
final int count = _TestUtil.nextInt(random(), 1, 4);
for(int ct=0;ct<count;ct++) {
doc.add(f);
}
@ -158,7 +158,7 @@ public class TestLongPostings extends LuceneTestCase {
final String term;
final boolean doS1;
if (random.nextBoolean()) {
if (random().nextBoolean()) {
term = s1;
doS1 = true;
} else {
@ -174,7 +174,7 @@ public class TestLongPostings extends LuceneTestCase {
int docID = -1;
while(docID < DocIdSetIterator.NO_MORE_DOCS) {
final int what = random.nextInt(3);
final int what = random().nextInt(3);
if (what == 0) {
if (VERBOSE) {
System.out.println("TEST: docID=" + docID + "; do next()");
@ -200,12 +200,12 @@ public class TestLongPostings extends LuceneTestCase {
break;
}
if (random.nextInt(6) == 3) {
if (random().nextInt(6) == 3) {
final int freq = postings.freq();
assertTrue(freq >=1 && freq <= 4);
for(int pos=0;pos<freq;pos++) {
assertEquals(pos, postings.nextPosition());
if (random.nextBoolean() && postings.hasPayload()) {
if (random().nextBoolean() && postings.hasPayload()) {
postings.getPayload();
}
}
@ -214,9 +214,9 @@ public class TestLongPostings extends LuceneTestCase {
// advance
final int targetDocID;
if (docID == -1) {
targetDocID = random.nextInt(NUM_DOCS+1);
targetDocID = random().nextInt(NUM_DOCS+1);
} else {
targetDocID = docID + _TestUtil.nextInt(random, 1, NUM_DOCS - docID);
targetDocID = docID + _TestUtil.nextInt(random(), 1, NUM_DOCS - docID);
}
if (VERBOSE) {
System.out.println("TEST: docID=" + docID + "; do advance(" + targetDocID + ")");
@ -242,12 +242,12 @@ public class TestLongPostings extends LuceneTestCase {
break;
}
if (random.nextInt(6) == 3) {
if (random().nextInt(6) == 3) {
final int freq = postings.freq();
assertTrue(freq >=1 && freq <= 4);
for(int pos=0;pos<freq;pos++) {
assertEquals(pos, postings.nextPosition());
if (random.nextBoolean() && postings.hasPayload()) {
if (random().nextBoolean() && postings.hasPayload()) {
postings.getPayload();
}
}
@ -268,7 +268,7 @@ public class TestLongPostings extends LuceneTestCase {
public void doTestLongPostingsNoPositions(IndexOptions options) throws Exception {
// Don't use _TestUtil.getTempDir so that we own the
// randomness (ie same seed will point to same dir):
Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random().nextLong()));
final int NUM_DOCS = atLeast(2000);
@ -293,19 +293,19 @@ public class TestLongPostings extends LuceneTestCase {
final FixedBitSet isS1 = new FixedBitSet(NUM_DOCS);
for(int idx=0;idx<NUM_DOCS;idx++) {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
isS1.set(idx);
}
}
final IndexReader r;
if (true) {
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
.setMergePolicy(newLogMergePolicy());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random.nextDouble());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random().nextDouble());
iwc.setMaxBufferedDocs(-1);
final RandomIndexWriter riw = new RandomIndexWriter(random, dir, iwc);
final RandomIndexWriter riw = new RandomIndexWriter(random(), dir, iwc);
FieldType ft = new FieldType(TextField.TYPE_UNSTORED);
ft.setIndexOptions(options);
@ -313,7 +313,7 @@ public class TestLongPostings extends LuceneTestCase {
final Document doc = new Document();
String s = isS1.get(idx) ? s1 : s2;
final Field f = newField("field", s, ft);
final int count = _TestUtil.nextInt(random, 1, 4);
final int count = _TestUtil.nextInt(random(), 1, 4);
for(int ct=0;ct<count;ct++) {
doc.add(f);
}
@ -352,7 +352,7 @@ public class TestLongPostings extends LuceneTestCase {
final String term;
final boolean doS1;
if (random.nextBoolean()) {
if (random().nextBoolean()) {
term = s1;
doS1 = true;
} else {
@ -368,17 +368,17 @@ public class TestLongPostings extends LuceneTestCase {
final DocsEnum postings;
if (options == IndexOptions.DOCS_ONLY) {
docs = _TestUtil.docs(random, r, "field", new BytesRef(term), null, null, false);
docs = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, false);
postings = null;
} else {
docs = postings = _TestUtil.docs(random, r, "field", new BytesRef(term), null, null, true);
docs = postings = _TestUtil.docs(random(), r, "field", new BytesRef(term), null, null, true);
assert postings != null;
}
assert docs != null;
int docID = -1;
while(docID < DocIdSetIterator.NO_MORE_DOCS) {
final int what = random.nextInt(3);
final int what = random().nextInt(3);
if (what == 0) {
if (VERBOSE) {
System.out.println("TEST: docID=" + docID + "; do next()");
@ -404,7 +404,7 @@ public class TestLongPostings extends LuceneTestCase {
break;
}
if (random.nextInt(6) == 3 && postings != null) {
if (random().nextInt(6) == 3 && postings != null) {
final int freq = postings.freq();
assertTrue(freq >=1 && freq <= 4);
}
@ -412,9 +412,9 @@ public class TestLongPostings extends LuceneTestCase {
// advance
final int targetDocID;
if (docID == -1) {
targetDocID = random.nextInt(NUM_DOCS+1);
targetDocID = random().nextInt(NUM_DOCS+1);
} else {
targetDocID = docID + _TestUtil.nextInt(random, 1, NUM_DOCS - docID);
targetDocID = docID + _TestUtil.nextInt(random(), 1, NUM_DOCS - docID);
}
if (VERBOSE) {
System.out.println("TEST: docID=" + docID + "; do advance(" + targetDocID + ")");
@ -440,7 +440,7 @@ public class TestLongPostings extends LuceneTestCase {
break;
}
if (random.nextInt(6) == 3 && postings != null) {
if (random().nextInt(6) == 3 && postings != null) {
final int freq = postings.freq();
assertTrue("got invalid freq=" + freq, freq >=1 && freq <= 4);
}

View File

@ -46,9 +46,9 @@ public class TestMaxTermFrequency extends LuceneTestCase {
super.setUp();
dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy());
new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy());
config.setSimilarity(new TestSimilarity());
RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
Document doc = new Document();
Field foo = newField("foo", "", TextField.TYPE_UNSTORED);
doc.add(foo);
@ -81,16 +81,16 @@ public class TestMaxTermFrequency extends LuceneTestCase {
*/
private String addValue() {
List<String> terms = new ArrayList<String>();
int maxCeiling = _TestUtil.nextInt(random, 0, 255);
int maxCeiling = _TestUtil.nextInt(random(), 0, 255);
int max = 0;
for (char ch = 'a'; ch <= 'z'; ch++) {
int num = _TestUtil.nextInt(random, 0, maxCeiling);
int num = _TestUtil.nextInt(random(), 0, maxCeiling);
for (int i = 0; i < num; i++)
terms.add(Character.toString(ch));
max = Math.max(max, num);
}
expected.add(max);
Collections.shuffle(terms, random);
Collections.shuffle(terms, random());
return Arrays.toString(terms.toArray(new String[terms.size()]));
}

View File

@ -49,8 +49,8 @@ public class TestMixedCodecs extends LuceneTestCase {
System.out.println("TEST: " + docUpto + " of " + NUM_DOCS);
}
if (docsLeftInThisSegment == 0) {
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
if (random.nextBoolean()) {
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
if (random().nextBoolean()) {
// Make sure we aggressively mix in SimpleText
// since it has different impls for all codec
// formats...
@ -59,8 +59,8 @@ public class TestMixedCodecs extends LuceneTestCase {
if (w != null) {
w.close();
}
w = new RandomIndexWriter(random, dir, iwc);
docsLeftInThisSegment = _TestUtil.nextInt(random, 10, 100);
w = new RandomIndexWriter(random(), dir, iwc);
docsLeftInThisSegment = _TestUtil.nextInt(random(), 10, 100);
}
final Document doc = new Document();
doc.add(newField("id", String.valueOf(docUpto), StringField.TYPE_STORED));
@ -76,11 +76,11 @@ public class TestMixedCodecs extends LuceneTestCase {
// Random delete half the docs:
final Set<Integer> deleted = new HashSet<Integer>();
while(deleted.size() < NUM_DOCS/2) {
final Integer toDelete = random.nextInt(NUM_DOCS);
final Integer toDelete = random().nextInt(NUM_DOCS);
if (!deleted.contains(toDelete)) {
deleted.add(toDelete);
w.deleteDocuments(new Term("id", String.valueOf(toDelete)));
if (random.nextInt(17) == 6) {
if (random().nextInt(17) == 6) {
final IndexReader r = w.getReader();
assertEquals(NUM_DOCS - deleted.size(), r.numDocs());
r.close();

View File

@ -36,34 +36,34 @@ public class TestMultiFields extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
_TestUtil.keepFullyDeletedSegments(w);
Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>();
Set<Integer> deleted = new HashSet<Integer>();
List<BytesRef> terms = new ArrayList<BytesRef>();
int numDocs = _TestUtil.nextInt(random, 1, 100 * RANDOM_MULTIPLIER);
int numDocs = _TestUtil.nextInt(random(), 1, 100 * RANDOM_MULTIPLIER);
Document doc = new Document();
Field f = newField("field", "", StringField.TYPE_UNSTORED);
doc.add(f);
Field id = newField("id", "", StringField.TYPE_UNSTORED);
doc.add(id);
boolean onlyUniqueTerms = random.nextBoolean();
boolean onlyUniqueTerms = random().nextBoolean();
if (VERBOSE) {
System.out.println("TEST: onlyUniqueTerms=" + onlyUniqueTerms + " numDocs=" + numDocs);
}
Set<BytesRef> uniqueTerms = new HashSet<BytesRef>();
for(int i=0;i<numDocs;i++) {
if (!onlyUniqueTerms && random.nextBoolean() && terms.size() > 0) {
if (!onlyUniqueTerms && random().nextBoolean() && terms.size() > 0) {
// re-use existing term
BytesRef term = terms.get(random.nextInt(terms.size()));
BytesRef term = terms.get(random().nextInt(terms.size()));
docs.get(term).add(i);
f.setStringValue(term.utf8ToString());
} else {
String s = _TestUtil.randomUnicodeString(random, 10);
String s = _TestUtil.randomUnicodeString(random(), 10);
BytesRef term = new BytesRef(s);
if (!docs.containsKey(term)) {
docs.put(term, new ArrayList<Integer>());
@ -75,11 +75,11 @@ public class TestMultiFields extends LuceneTestCase {
}
id.setStringValue(""+i);
w.addDocument(doc);
if (random.nextInt(4) == 1) {
if (random().nextInt(4) == 1) {
w.commit();
}
if (i > 0 && random.nextInt(20) == 1) {
int delID = random.nextInt(i);
if (i > 0 && random().nextInt(20) == 1) {
int delID = random().nextInt(i);
deleted.add(delID);
w.deleteDocuments(new Term("id", ""+delID));
if (VERBOSE) {
@ -116,12 +116,12 @@ public class TestMultiFields extends LuceneTestCase {
}
for(int i=0;i<100;i++) {
BytesRef term = terms.get(random.nextInt(terms.size()));
BytesRef term = terms.get(random().nextInt(terms.size()));
if (VERBOSE) {
System.out.println("TEST: seek term="+ UnicodeUtil.toHexString(term.utf8ToString()) + " " + term);
}
DocsEnum docsEnum = _TestUtil.docs(random, reader, "field", term, liveDocs, null, false);
DocsEnum docsEnum = _TestUtil.docs(random(), reader, "field", term, liveDocs, null, false);
assertNotNull(docsEnum);
for(int docID : docs.get(term)) {
@ -154,7 +154,7 @@ public class TestMultiFields extends LuceneTestCase {
public void testSeparateEnums() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
Document d = new Document();
d.add(newField("f", "j", StringField.TYPE_UNSTORED));
w.addDocument(d);
@ -162,8 +162,8 @@ public class TestMultiFields extends LuceneTestCase {
w.addDocument(d);
IndexReader r = w.getReader();
w.close();
DocsEnum d1 = _TestUtil.docs(random, r, "f", new BytesRef("j"), null, null, false);
DocsEnum d2 = _TestUtil.docs(random, r, "f", new BytesRef("j"), null, null, false);
DocsEnum d1 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, false);
DocsEnum d2 = _TestUtil.docs(random(), r, "f", new BytesRef("j"), null, null, false);
assertEquals(0, d1.nextDoc());
assertEquals(0, d2.nextDoc());
r.close();

View File

@ -48,7 +48,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
class CountingRAMDirectory extends MockDirectoryWrapper {
public CountingRAMDirectory(Directory delegate) {
super(random, delegate);
super(random(), delegate);
}
@Override

View File

@ -32,7 +32,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
Directory mainDir = newDirectory();
IndexWriter writer = new IndexWriter(
mainDir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(false,2))
);
@ -76,7 +76,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
int delCount = 0;
int addCount = 0;
int type;
final Random r = new Random(random.nextLong());
final Random r = new Random(random().nextLong());
public RunThread(int type, IndexWriter writer) {
this.type = type;

View File

@ -39,7 +39,7 @@ public class TestNRTThreads extends ThreadedIndexingAndSearchingTestCase {
DirectoryReader r = IndexReader.open(writer, true);
while (System.currentTimeMillis() < stopTime && !failed.get()) {
if (random.nextBoolean()) {
if (random().nextBoolean()) {
if (VERBOSE) {
System.out.println("TEST: now reopen r=" + r);
}
@ -106,7 +106,7 @@ public class TestNRTThreads extends ThreadedIndexingAndSearchingTestCase {
@Override
protected IndexSearcher getFinalSearcher() throws Exception {
final IndexReader r2;
if (random.nextBoolean()) {
if (random().nextBoolean()) {
r2 = writer.getReader();
} else {
writer.commit();

Some files were not shown because too many files have changed in this diff