upgrade randomized-testing to 2.1.1

Note that the standard `atLeast` implementation now has `Integer.MAX_VALUE` as its upper bound, so it behaves differently from what we expect in our tests, since we never expect the upper bound to be that high.
Added our own `atLeast` to `AbstractRandomizedTest` so that it keeps the expected behaviour with a reasonable upper bound.
See https://github.com/carrotsearch/randomizedtesting/issues/131
javanna 2014-03-12 18:42:21 +01:00 committed by Simon Willnauer
parent 767bef0596
commit d80dd00424
57 changed files with 137 additions and 129 deletions
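
Most of the diff below mechanically replaces `atLeast(n)` with `scaledRandomIntBetween(n, m)`. For context, here is a minimal sketch of the two helpers involved. The bounded `atLeast` mirrors the helper this commit removes from `DuelFieldDataTests` (the behaviour the commit message wants to keep); the `scaledRandomIntBetween` shown here only approximates the real method in `com.carrotsearch.randomizedtesting`, whose scaling is driven by test effort flags, so the explicit `effort` parameter is an assumption for illustration:

```java
import java.util.Random;

// Sketch only; not the actual AbstractRandomizedTest / RandomizedTest code.
public final class RandomBoundsSketch {

    // Bounded atLeast: a value in [min, min + min/2), i.e. a "reasonable"
    // upper bound instead of Integer.MAX_VALUE.
    public static int atLeast(Random random, int min) {
        int max = min + (min / 2);
        return min + random.nextInt(Math.max(1, max - min)); // guard for min < 2
    }

    // Approximation of scaledRandomIntBetween: picks a value between min and
    // max, where a higher effort (0.0 to 1.0, hypothetical here) widens the
    // range that can actually be reached.
    public static int scaledRandomIntBetween(Random random, int min, int max, double effort) {
        int span = (int) Math.round((max - min) * effort);
        return min + random.nextInt(span + 1); // inclusive of the scaled upper bound
    }

    public static void main(String[] args) {
        Random random = new Random();
        System.out.println(atLeast(random, 25));                          // somewhere in [25, 37)
        System.out.println(scaledRandomIntBetween(random, 25, 100, 0.5)); // somewhere in [25, 63]
    }
}
```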

@@ -48,7 +48,7 @@
 <dependency>
 <groupId>com.carrotsearch.randomizedtesting</groupId>
 <artifactId>randomizedtesting-runner</artifactId>
-<version>2.0.15</version>
+<version>2.1.1</version>
 <scope>test</scope>
 </dependency>
 <dependency>
@@ -356,7 +356,7 @@
 <plugin>
 <groupId>com.carrotsearch.randomizedtesting</groupId>
 <artifactId>junit4-maven-plugin</artifactId>
-<version>2.0.15</version>
+<version>2.1.1</version>
 <executions>
 <execution>
 <id>tests</id>
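
As a side note on verifying a bump like this, Maven's standard dependency tree goal can confirm which randomizedtesting version actually lands on the test classpath; a usage sketch (not part of the commit):

```
mvn dependency:tree -Dincludes=com.carrotsearch.randomizedtesting
```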

@@ -63,7 +63,7 @@ public class BlendedTermQueryTest extends ElasticsearchLuceneTestCase {
 d.add(new TextField("surname", surNames[i], Field.Store.NO));
 w.addDocument(d);
 }
-int iters = atLeast(25);
+int iters = scaledRandomIntBetween(25, 100);
 for (int j = 0; j < iters; j++) {
 Document d = new Document();
 d.add(new TextField("id", Integer.toString(firstNames.length + j), Field.Store.YES));
@@ -123,7 +123,7 @@ public class BlendedTermQueryTest extends ElasticsearchLuceneTestCase {
 d.add(new Field("song", song[i], ft));
 w.addDocument(d);
 }
-int iters = atLeast(25);
+int iters = scaledRandomIntBetween(25, 100);
 for (int j = 0; j < iters; j++) {
 Document d = new Document();
 d.add(new TextField("id", Integer.toString(username.length + j), Field.Store.YES));
@@ -171,7 +171,7 @@ public class BlendedTermQueryTest extends ElasticsearchLuceneTestCase {
 @Test
 public void testBasics() {
-final int iters = atLeast(5);
+final int iters = scaledRandomIntBetween(5, 25);
 for (int j = 0; j < iters; j++) {
 String[] fields = new String[1 + random().nextInt(10)];
 for (int i = 0; i < fields.length; i++) {
@@ -210,7 +210,7 @@ public class BlendedTermQueryTest extends ElasticsearchLuceneTestCase {
 @Test
 public void testExtractTerms() {
 Set<Term> terms = new HashSet<Term>();
-int num = atLeast(1);
+int num = scaledRandomIntBetween(1, 10);
 for (int i = 0; i < num; i++) {
 terms.add(new Term(_TestUtil.randomRealisticUnicodeString(random(), 1, 10), _TestUtil.randomRealisticUnicodeString(random(), 1, 10)));
 }

@@ -451,7 +451,7 @@ public class CustomPostingsHighlighterTests extends ElasticsearchLuceneTestCase
 Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
 assertThat(snippets.length, equalTo(0));
-highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, atLeast(1));
+highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, scaledRandomIntBetween(1, 10));
 snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
 assertThat(snippets.length, equalTo(1));
 assertThat(snippets[0].getText(), equalTo("This is a test."));

@@ -1484,7 +1484,7 @@ public class XPostingsHighlighterTests extends ElasticsearchLuceneTestCase {
 FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
 offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
-int numDocs = atLeast(100);
+int numDocs = scaledRandomIntBetween(100, 1000);
 for(int i=0;i<numDocs;i++) {
 Document doc = new Document();
 String content = "the answer is " + i;

@@ -25,7 +25,6 @@ import org.junit.Test;
 import static org.elasticsearch.Version.V_0_20_0;
 import static org.elasticsearch.Version.V_0_90_0;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.sameInstance;
@@ -54,7 +53,7 @@ public class VersionTests extends ElasticsearchTestCase {
 public void testVersionConstantPresent() {
 assertThat(Version.CURRENT, sameInstance(Version.fromId(Version.CURRENT.id)));
 assertThat(Version.CURRENT.luceneVersion.ordinal(), equalTo(org.apache.lucene.util.Version.LUCENE_CURRENT.ordinal() - 1));
-final int iters = atLeast(20);
+final int iters = scaledRandomIntBetween(20, 100);
 for (int i = 0; i < iters; i++) {
 Version version = randomVersion();
 assertThat(version, sameInstance(Version.fromId(version.id)));

@@ -51,7 +51,7 @@ public class HotThreadsTest extends ElasticsearchIntegrationTest {
 * This test just checks if nothing crashes or gets stuck etc.
 */
 createIndex("test");
-final int iters = atLeast(2);
+final int iters = scaledRandomIntBetween(2, 20);
 final AtomicBoolean hasErrors = new AtomicBoolean(false);
 for (int i = 0; i < iters; i++) {
 final String type;

@@ -38,7 +38,7 @@ public class DiskUsageTests extends ElasticsearchTestCase {
 @Test
 public void randomDiskUsageTest() {
-int iters = atLeast(1000);
+int iters = scaledRandomIntBetween(1000, 10000);
 for (int i = 1; i < iters; i++) {
 long total = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
 long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE);

@@ -191,7 +191,7 @@ public class NodeVersionAllocationDeciderTests extends ElasticsearchAllocationTe
 assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(routingTable.allShards().size()));
 List<DiscoveryNode> nodes = new ArrayList<DiscoveryNode>();
 int nodeIdx = 0;
-int iters = atLeast(10);
+int iters = scaledRandomIntBetween(10, 100);
 for (int i = 0; i < iters; i++) {
 DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
 int numNodes = between(1, 20);

@@ -80,7 +80,7 @@ public class RandomAllocationDeciderTests extends ElasticsearchAllocationTestCas
 RoutingTable routingTable = routingTableBuilder.build();
 ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
-int numIters = atLeast(20);
+int numIters = scaledRandomIntBetween(20, 100);
 int nodeIdCounter = 0;
 int atMostNodes = between(Math.max(1, maxNumReplicas), numIters);
 final boolean frequentNodes = randomBoolean();
@@ -90,7 +90,7 @@ public class RandomAllocationDeciderTests extends ElasticsearchAllocationTestCas
 if (clusterState.nodes().size() <= atMostNodes &&
 (nodeIdCounter == 0 || (frequentNodes ? frequently() : rarely()))) {
-int numNodes = atLeast(1);
+int numNodes = scaledRandomIntBetween(1, 15);
 for (int j = 0; j < numNodes; j++) {
 logger.info("adding node [{}]", nodeIdCounter);
 newNodesBuilder.put(newNode("NODE_" + (nodeIdCounter++)));

@@ -31,14 +31,14 @@ import java.nio.LongBuffer;
 public class MurmurHash3Tests extends ElasticsearchTestCase {
 public void testHash128() {
-final int iters = atLeast(100);
+final int iters = scaledRandomIntBetween(100, 5000);
 for (int i = 0; i < iters; ++i) {
 final int seed = randomInt();
 final int offset = randomInt(20);
 final int len = randomInt(randomBoolean() ? 20 : 200);
 final byte[] bytes = new byte[len + offset + randomInt(3)];
 getRandom().nextBytes(bytes);
-HashCode h1 = Hashing.murmur3_128(seed).hashBytes(bytes, offset, len);;
+HashCode h1 = Hashing.murmur3_128(seed).hashBytes(bytes, offset, len);
 MurmurHash3.Hash128 h2 = MurmurHash3.hash128(bytes, offset, len, seed, new MurmurHash3.Hash128());
 assertEquals(h1, h2);
 }

@@ -434,7 +434,7 @@ public class XBooleanFilterTests extends ElasticsearchLuceneTestCase {
 @Test
 public void testRandom() throws IOException {
-int iterations = atLeast(400); // don't worry that is fast!
+int iterations = scaledRandomIntBetween(100, 1000); // don't worry that is fast!
 for (int iter = 0; iter < iterations; iter++) {
 int numClauses = 1 + random().nextInt(10);
 FilterClause[] clauses = new FilterClause[numClauses];

@@ -45,7 +45,7 @@ public class FuzzinessTests extends ElasticsearchTestCase {
 @Test
 public void testParseFromXContent() throws IOException {
-final int iters = atLeast(10);
+final int iters = randomIntBetween(10, 50);
 for (int i = 0; i < iters; i++) {
 {
 XContent xcontent = XContentType.JSON.xContent();
@@ -165,7 +165,7 @@ public class FuzzinessTests extends ElasticsearchTestCase {
 @Test
 public void testAsDistance() {
-final int iters = atLeast(10);
+final int iters = randomIntBetween(10, 50);
 for (int i = 0; i < iters; i++) {
 Integer integer = Integer.valueOf(randomIntBetween(0, 10));
 String value = "" + (randomBoolean() ? integer.intValue() : integer.floatValue());
@@ -183,7 +183,7 @@ public class FuzzinessTests extends ElasticsearchTestCase {
 assertThat((double) Fuzziness.TWO.asSimilarity("abcefg"), closeTo(0.66f, 0.05));
 assertThat((double) Fuzziness.ONE.asSimilarity("ab"), closeTo(0.5f, 0.05));
-int iters = atLeast(100);
+int iters = randomIntBetween(100, 1000);
 for (int i = 0; i < iters; i++) {
 Fuzziness fuzziness = Fuzziness.fromEdits(between(1, 2));
 String string = rarely() ? randomRealisticUnicodeOfLengthBetween(2, 4) :

@@ -45,7 +45,7 @@ public class ByteUtilsTests extends ElasticsearchTestCase {
 }
 public void testFloat() throws IOException {
-final float[] data = new float[atLeast(1000)];
+final float[] data = new float[scaledRandomIntBetween(1000, 10000)];
 final byte[] encoded = new byte[data.length * 4];
 for (int i = 0; i < data.length; ++i) {
 data[i] = randomFloat();
@@ -57,7 +57,7 @@ public class ByteUtilsTests extends ElasticsearchTestCase {
 }
 public void testDouble() throws IOException {
-final double[] data = new double[atLeast(1000)];
+final double[] data = new double[scaledRandomIntBetween(1000, 10000)];
 final byte[] encoded = new byte[data.length * 8];
 for (int i = 0; i < data.length; ++i) {
 data[i] = randomDouble();
@@ -69,7 +69,7 @@ public class ByteUtilsTests extends ElasticsearchTestCase {
 }
 public void testVLong() throws IOException {
-final long[] data = new long[atLeast(1000)];
+final long[] data = new long[scaledRandomIntBetween(1000, 10000)];
 for (int i = 0; i < data.length; ++i) {
 switch (randomInt(4)) {
 case 0:

@@ -94,7 +94,7 @@ public class BytesRefHashTests extends ElasticsearchTestCase {
 @Test
 public void testSize() {
 BytesRef ref = new BytesRef();
-int num = atLeast(2);
+int num = scaledRandomIntBetween(2, 20);
 for (int j = 0; j < num; j++) {
 final int mod = 1+randomInt(40);
 for (int i = 0; i < 797; i++) {
@@ -126,7 +126,7 @@ public class BytesRefHashTests extends ElasticsearchTestCase {
 public void testGet() {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
-int num = atLeast(2);
+int num = scaledRandomIntBetween(2, 20);
 for (int j = 0; j < num; j++) {
 Map<String, Long> strings = new HashMap<String, Long>();
 int uniqueCount = 0;
@@ -166,7 +166,7 @@ public class BytesRefHashTests extends ElasticsearchTestCase {
 public void testAdd() {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
-int num = atLeast(2);
+int num = scaledRandomIntBetween(2, 20);
 for (int j = 0; j < num; j++) {
 Set<String> strings = new HashSet<String>();
 int uniqueCount = 0;
@@ -202,7 +202,7 @@ public class BytesRefHashTests extends ElasticsearchTestCase {
 public void testFind() throws Exception {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
-int num = atLeast(2);
+int num = scaledRandomIntBetween(2, 20);
 for (int j = 0; j < num; j++) {
 Set<String> strings = new HashSet<String>();
 int uniqueCount = 0;

@@ -30,7 +30,7 @@ public class DoubleObjectHashMapTests extends ElasticsearchTestCase {
 final DoubleObjectOpenHashMap<Object> map1 = new DoubleObjectOpenHashMap<Object>();
 final DoubleObjectPagedHashMap<Object> map2 = new DoubleObjectPagedHashMap<Object>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
 final int maxKey = randomIntBetween(1, 10000);
-final int iters = atLeast(10000);
+final int iters = scaledRandomIntBetween(10000, 100000);
 for (int i = 0; i < iters; ++i) {
 final boolean put = randomBoolean();
 final int iters2 = randomIntBetween(1, 100);

@@ -30,7 +30,7 @@ public class LongObjectHashMapTests extends ElasticsearchTestCase {
 final LongObjectOpenHashMap<Object> map1 = new LongObjectOpenHashMap<Object>();
 final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<Object>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
 final int maxKey = randomIntBetween(1, 10000);
-final int iters = atLeast(10000);
+final int iters = scaledRandomIntBetween(10000, 100000);
 for (int i = 0; i < iters; ++i) {
 final boolean put = randomBoolean();
 final int iters2 = randomIntBetween(1, 100);

@@ -19,7 +19,6 @@
 package org.elasticsearch.common.util.concurrent;
-import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.hamcrest.Matchers;
 import org.junit.Test;
@@ -33,11 +32,11 @@ import static org.hamcrest.Matchers.greaterThan;
 public class CountDownTest extends ElasticsearchTestCase {
-@Test @Repeat(iterations = 1000)
+@Test
 public void testConcurrent() throws InterruptedException {
 final AtomicInteger count = new AtomicInteger(0);
-final CountDown countDown = new CountDown(atLeast(10));
-Thread[] threads = new Thread[atLeast(3)];
+final CountDown countDown = new CountDown(scaledRandomIntBetween(10, 1000));
+Thread[] threads = new Thread[between(3, 10)];
 final CountDownLatch latch = new CountDownLatch(1);
 for (int i = 0; i < threads.length; i++) {
 threads[i] = new Thread() {
@@ -83,7 +82,7 @@ public class CountDownTest extends ElasticsearchTestCase {
 @Test
 public void testSingleThreaded() {
-int atLeast = atLeast(10);
+int atLeast = scaledRandomIntBetween(10, 1000);
 final CountDown countDown = new CountDown(atLeast);
 while(!countDown.isCountedDown()) {
 atLeast--;

@@ -45,7 +45,7 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest {
 client().prepareIndex("test", "type", "5").setSource("field", "value"),
 client().prepareIndex("test", "type", "6").setSource("field", "value"));
-int iters = atLeast(10);
+int iters = scaledRandomIntBetween(10, 100);
 for (int i = 0; i < iters; i++) {
 // id is not indexed, but lets see that we automatically convert to
 CountResponse countResponse = client().prepareCount().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomUnicodeOfLengthBetween(0, 4)).get();

@@ -125,7 +125,7 @@ public class DeleteByQueryTests extends ElasticsearchIntegrationTest {
 @Test
 public void testDeleteByFieldQuery() throws Exception {
 client().admin().indices().prepareCreate("test").execute().actionGet();
-int numDocs = atLeast(10);
+int numDocs = scaledRandomIntBetween(10, 100);
 for (int i = 0; i < numDocs; i++) {
 client().prepareIndex("test", "test", Integer.toString(i))
 .setRouting(randomAsciiOfLengthBetween(1, 5))

@@ -280,26 +280,37 @@ public class IndexGatewayTests extends ElasticsearchIntegrationTest {
 logger.info("--> refreshing and checking count");
 client().admin().indices().prepareRefresh().execute().actionGet();
 assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(0l));
-logger.info("--> indexing 1234 docs");
-for (long i = 0; i < 1234; i++) {
+long numDocs = between(100, rarely() ? 2000 : 1000);
+logger.info("--> indexing " + numDocs + " docs");
+boolean hasSnapshoted = false;
+boolean hasFlushed = false;
+for (long i = 0; i < numDocs; i++) {
 client().prepareIndex("test", "type1", Long.toString(i))
 .setCreate(true) // make sure we use create, so if we recover wrongly, we will get increments...
 .setSource(MapBuilder.<String, Object>newMapBuilder().put("test", "value" + i).map()).execute().actionGet();
 // snapshot every 100 so we get some actions going on in the gateway
-if ((i % 11) == 0) {
+if (rarely()) {
+hasSnapshoted = true;
 client().admin().indices().prepareGatewaySnapshot().execute().actionGet();
 }
 // flush every once is a while, so we get different data
-if ((i % 55) == 0) {
+if (rarely()) {
+hasFlushed = true;
 client().admin().indices().prepareFlush().execute().actionGet();
 }
 }
+if (!hasSnapshoted) {
+client().admin().indices().prepareGatewaySnapshot().execute().actionGet();
+}
+if (!hasFlushed) {
+client().admin().indices().prepareFlush().execute().actionGet();
+}
 logger.info("--> refreshing and checking count");
 client().admin().indices().prepareRefresh().execute().actionGet();
-assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1234l));
+assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(numDocs));
 logger.info("--> closing the server");
@@ -319,7 +330,7 @@ public class IndexGatewayTests extends ElasticsearchIntegrationTest {
 assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
 logger.info("--> checking count");
-assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(1234l));
+assertThat(client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount(), equalTo(numDocs));
 logger.info("--> checking reuse / recovery status");
 IndicesStatusResponse statusResponse = client().admin().indices().prepareStatus().setRecovery(true).execute().actionGet();
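
The IndexGatewayTests change above also introduces a useful pattern: drive side effects with `rarely()`, then force the action once afterwards if randomness never triggered it, so the code path is always exercised. A minimal standalone sketch (the fixed 10% rate is an assumption; the real `rarely()` in randomizedtesting derives its probability from test configuration):

```java
import java.util.Random;

// Sketch of the "rarely, but at least once" idiom from the diff above.
public final class RarelyAtLeastOnce {

    private static final Random RANDOM = new Random();

    // Assumed ~10% trigger rate; randomizedtesting's rarely() is configurable.
    static boolean rarely() {
        return RANDOM.nextInt(10) == 0;
    }

    static void flush() {
        System.out.println("flush");
    }

    public static void main(String[] args) {
        boolean hasFlushed = false;
        for (int i = 0; i < 100; i++) {
            // ... index a document here ...
            if (rarely()) {
                hasFlushed = true;
                flush();
            }
        }
        // Guarantee the flush path runs even if rarely() never fired.
        if (!hasFlushed) {
            flush();
        }
    }
}
```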

@@ -24,6 +24,7 @@ import org.junit.Ignore;
 import java.io.IOException;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED;
 /**
@@ -32,7 +33,7 @@ public class AnalyzerBackwardsCompatTests extends ElasticsearchTokenStreamTestCa
 @Ignore
 private void testNoStopwordsAfter(org.elasticsearch.Version noStopwordVersion, String type) throws IOException {
-final int iters = atLeast(10);
+final int iters = scaledRandomIntBetween(10, 100);
 org.elasticsearch.Version version = org.elasticsearch.Version.CURRENT;
 for (int i = 0; i < iters; i++) {
 ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder().put("index.analysis.filter.my_stop.type", "stop");

@@ -43,6 +43,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.scaledRandomIntBetween;
 import static org.hamcrest.Matchers.instanceOf;
 public class NGramTokenizerFactoryTests extends ElasticsearchTokenStreamTestCase {
@@ -108,7 +109,7 @@ public class NGramTokenizerFactoryTests extends ElasticsearchTokenStreamTestCase
 @Test
 public void testBackwardsCompatibilityEdgeNgramTokenizer() throws IllegalArgumentException, IllegalAccessException {
-int iters = atLeast(20);
+int iters = scaledRandomIntBetween(20, 100);
 final Index index = new Index("test");
 final String name = "ngr";
 for (int i = 0; i < iters; i++) {
@@ -150,7 +151,7 @@ public class NGramTokenizerFactoryTests extends ElasticsearchTokenStreamTestCase
 @Test
 public void testBackwardsCompatibilityNgramTokenizer() throws IllegalArgumentException, IllegalAccessException {
-int iters = atLeast(20);
+int iters = scaledRandomIntBetween(20, 100);
 for (int i = 0; i < iters; i++) {
 final Index index = new Index("test");
 final String name = "ngr";
@@ -183,7 +184,7 @@ public class NGramTokenizerFactoryTests extends ElasticsearchTokenStreamTestCase
 @Test
 public void testBackwardsCompatibilityEdgeNgramTokenFilter() throws IllegalArgumentException, IllegalAccessException {
-int iters = atLeast(20);
+int iters = scaledRandomIntBetween(20, 100);
 for (int i = 0; i < iters; i++) {
 final Index index = new Index("test");
 final String name = "ngr";

@@ -62,7 +62,7 @@ public class PreBuiltAnalyzerTests extends ElasticsearchTestCase {
 // special case, these two are the same instance
 assertThat(currentDefaultAnalyzer, is(currentStandardAnalyzer));
 PreBuiltAnalyzers.DEFAULT.getAnalyzer(Version.V_1_0_0_Beta1);
-final int n = atLeast(10);
+final int n = scaledRandomIntBetween(10, 100);
 Version version = Version.CURRENT;
 for(int i = 0; i < n; i++) {
 if (version.equals(Version.V_1_0_0_Beta1)) {
@@ -95,7 +95,7 @@ public class PreBuiltAnalyzerTests extends ElasticsearchTestCase {
 public void testAnalyzerChangedIn10RC1() throws IOException {
 Analyzer pattern = PreBuiltAnalyzers.PATTERN.getAnalyzer(Version.V_1_0_0_RC1);
 Analyzer standardHtml = PreBuiltAnalyzers.STANDARD_HTML_STRIP.getAnalyzer(Version.V_1_0_0_RC1);
-final int n = atLeast(10);
+final int n = scaledRandomIntBetween(10, 100);
 Version version = Version.CURRENT;
 for(int i = 0; i < n; i++) {
 if (version.equals(Version.V_1_0_0_RC1)) {

@@ -222,7 +222,7 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
 for (int i = 1; i < values.length; ++i) {
 values[i] = _TestUtil.randomUnicodeString(getRandom());
 }
-final int numDocs = atLeast(100);
+final int numDocs = scaledRandomIntBetween(100, 10000);
 for (int i = 0; i < numDocs; ++i) {
 final String value = RandomPicks.randomFrom(getRandom(), values);
 if (value == null) {
@@ -284,7 +284,7 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
 for (int i = 1; i < values.length; ++i) {
 values[i] = _TestUtil.randomUnicodeString(getRandom());
 }
-final int numDocs = atLeast(100);
+final int numDocs = scaledRandomIntBetween(100, 10000);
 for (int i = 0; i < numDocs; ++i) {
 final String value = RandomPicks.randomFrom(getRandom(), values);
 if (value == null) {
@@ -338,7 +338,7 @@ public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImpl
 for (int i = 0; i < values.length; ++i) {
 values[i] = _TestUtil.randomSimpleString(getRandom());
 }
-final int numParents = atLeast(100);
+final int numParents = scaledRandomIntBetween(100, 10000);
 List<Document> docs = new ArrayList<Document>();
 final OpenBitSet parents = new OpenBitSet();
 for (int i = 0; i < numParents; ++i) {

@@ -53,12 +53,6 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 return null;
 }
-public static int atLeast(Random random, int i) {
-int min = i;
-int max = min + (min / 2);
-return min + random.nextInt(max - min);
-}
 @Test
 public void testDuelAllTypesSingleValue() throws Exception {
 final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -73,7 +67,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 .endObject().endObject().endObject().string();
 final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
 Random random = getRandom();
-int atLeast = atLeast(random, 1000);
+int atLeast = scaledRandomIntBetween(1000, 1500);
 for (int i = 0; i < atLeast; i++) {
 String s = Integer.toString(randomByte());
@@ -152,7 +146,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
 Random random = getRandom();
-int atLeast = atLeast(random, 1000);
+int atLeast = scaledRandomIntBetween(1000, 1500);
 final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
 byte[] values = new byte[maxNumValues];
 for (int i = 0; i < atLeast; i++) {
@@ -230,7 +224,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
 Random random = getRandom();
-int atLeast = atLeast(random, 1000);
+int atLeast = scaledRandomIntBetween(1000, 1500);
 final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
 float[] values = new float[maxNumValues];
 for (int i = 0; i < atLeast; i++) {
@@ -302,7 +296,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 @Test
 public void testDuelStrings() throws Exception {
 Random random = getRandom();
-int atLeast = atLeast(random, 1000);
+int atLeast = scaledRandomIntBetween(1000, 1500);
 for (int i = 0; i < atLeast; i++) {
 Document d = new Document();
 d.add(new StringField("_id", "" + i, Field.Store.NO));
@@ -377,7 +371,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTests {
 final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
 Random random = getRandom();
-int atLeast = atLeast(random, 1000);
+int atLeast = scaledRandomIntBetween(1000, 1500);
 int maxValuesPerDoc = randomBoolean() ? 1 : randomIntBetween(2, 40);
 // to test deduplication
 double defaultLat = randomDouble() * 180 - 90;

@@ -65,7 +65,7 @@ public class FieldDataFilterIntegrationTests extends ElasticsearchIntegrationTes
 .endObject().endObject();
 assertAcked(builder.addMapping("type", mapping));
 ensureGreen();
-int numDocs = atLeast(5);
+int numDocs = scaledRandomIntBetween(5, 50);
 for (int i = 0; i < numDocs; i++) {
 client().prepareIndex("test", "type", "" + 0).setSource("name", "bacon bastards", "not_filtered", "bacon bastards").get();
 }

@@ -1702,7 +1702,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchTestCase {
 "{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": 4}}",
 "{\"flt\": {\"fields\": [\"comment\"], \"like_text\": \"FFFdfds\",\"fuzziness\": 4.0}}"
 };
-int iters = atLeast(5);
+int iters = scaledRandomIntBetween(5, 100);
 for (int i = 0; i < iters; i++) {
 parsedQuery = queryParser.parse(new BytesArray((String) randomFrom(queries))).query();
 parsedQuery1 = queryParser.parse(new BytesArray((String) randomFrom(queries))).query();

@@ -57,7 +57,7 @@ public class CircuitBreakerServiceTests extends ElasticsearchIntegrationTest {
 try {
 // index some different terms so we have some field data for loading
-int docCount = atLeast(300);
+int docCount = scaledRandomIntBetween(300, 1000);
 for (long id = 0; id < docCount; id++) {
 client.prepareIndex("cb-test", "type", Long.toString(id))
 .setSource(MapBuilder.<String, Object>newMapBuilder().put("test", "value" + id).map()).execute().actionGet();
@@ -108,7 +108,7 @@ public class CircuitBreakerServiceTests extends ElasticsearchIntegrationTest {
 client.admin().cluster().prepareHealth("ramtest").setWaitForGreenStatus().setTimeout("10s").execute().actionGet();
 // index some different terms so we have some field data for loading
-int docCount = atLeast(300);
+int docCount = scaledRandomIntBetween(300, 1000);
 for (long id = 0; id < docCount; id++) {
 client.prepareIndex("ramtest", "type", Long.toString(id))
 .setSource(MapBuilder.<String, Object>newMapBuilder().put("test", "value" + id).map()).execute().actionGet();

@@ -140,7 +140,7 @@ public class RandomExceptionCircuitBreakerTests extends ElasticsearchIntegration
 logger.info("Refresh failed: [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ",
 refreshFailed, refreshResponse.getFailedShards(), refreshResponse.getShardFailures().length,
 refreshResponse.getSuccessfulShards(), refreshResponse.getTotalShards());
-final int numSearches = atLeast(50);
+final int numSearches = scaledRandomIntBetween(50, 150);
 NodesStatsResponse resp = client().admin().cluster().prepareNodesStats()
 .clear().setBreaker(true).execute().actionGet();
 for (NodeStats stats : resp.getNodes()) {

@@ -50,13 +50,13 @@ public class ConcurrentDynamicTemplateTests extends ElasticsearchIntegrationTest
 // The 'fieldNames' array is used to help with retrieval of index terms
 // after testing
-int iters = atLeast(5);
+int iters = scaledRandomIntBetween(5, 15);
 for (int i = 0; i < iters; i++) {
 wipeIndices("test");
 assertAcked(prepareCreate("test")
 .addMapping(mappingType, mapping));
 ensureYellow();
-int numDocs = atLeast(10);
+int numDocs = scaledRandomIntBetween(10, 100);
 final CountDownLatch latch = new CountDownLatch(numDocs);
 final List<Throwable> throwable = new CopyOnWriteArrayList<Throwable>();
 int currentID = 0;

@@ -79,7 +79,7 @@ public class IndexTemplateFileLoadingTests extends ElasticsearchIntegrationTest
 @Test
 public void testThatLoadingTemplateFromFileWorks() throws Exception {
-final int iters = atLeast(5);
+final int iters = scaledRandomIntBetween(5, 20);
 Set<String> indices = new HashSet<String>();
 for (int i = 0; i < iters; i++) {
 String indexName = "foo" + randomRealisticUnicodeOfLengthBetween(0, 5);

@@ -51,7 +51,7 @@ public class PercolatorFacetsAndAggregationsTests extends ElasticsearchIntegrati
 client().admin().indices().prepareCreate("test").execute().actionGet();
 ensureGreen();
-int numQueries = atLeast(250);
+int numQueries = scaledRandomIntBetween(250, 500);
 int numUniqueQueries = between(1, numQueries / 2);
 String[] values = new String[numUniqueQueries];
 for (int i = 0; i < values.length; i++) {

@@ -310,7 +310,7 @@ public class RecoveryWhileUnderLoadTests extends ElasticsearchIntegrationTest {
 final AtomicLong idGenerator = new AtomicLong();
 final AtomicLong indexCounter = new AtomicLong();
 final AtomicBoolean stop = new AtomicBoolean(false);
-Thread[] writers = new Thread[atLeast(3)];
+Thread[] writers = new Thread[scaledRandomIntBetween(3, 10)];
 final CountDownLatch stopLatch = new CountDownLatch(writers.length);
 logger.info("--> starting {} indexing threads", writers.length);
 final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();

@@ -108,8 +108,8 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
 @Test
 @Slow
 public void testPrimaryRelocationWhileIndexingRandom() throws Exception {
-int numRelocations = atLeast(rarely() ? 3 : 1);
-int numWriters = atLeast(rarely() ? 3 : 1);
+int numRelocations = scaledRandomIntBetween(1, rarely() ? 10 : 4);
+int numWriters = scaledRandomIntBetween(1, rarely() ? 10 : 4);
 boolean batch = getRandom().nextBoolean();
 logger.info("testPrimaryRelocationWhileIndexingRandom(numRelocations={}, numWriters={}, batch={}",
 numRelocations, numWriters, batch);
@@ -261,8 +261,8 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
 @Test
 @Slow
 public void testReplicaRelocationWhileIndexingRandom() throws Exception {
-int numRelocations = atLeast(rarely() ? 3 : 1);
-int numWriters = atLeast(rarely() ? 3 : 1);
+int numRelocations = scaledRandomIntBetween(1, rarely() ? 10 : 4);
+int numWriters = scaledRandomIntBetween(1, rarely() ? 10 : 4);
 boolean batch = getRandom().nextBoolean();
 logger.info("testReplicaRelocationWhileIndexing(numRelocations={}, numWriters={}, batch={}", numRelocations, numWriters, batch);
 testReplicaRelocationWhileIndexing(numRelocations, numWriters, batch);

@@ -26,13 +26,13 @@ import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
+import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
 import org.junit.Test;
 import java.util.concurrent.ExecutionException;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.ElasticsearchIntegrationTest.*;
-import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.*;
+import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.SUITE;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
@@ -48,7 +48,7 @@ public class StressSearchServiceReaperTest extends ElasticsearchIntegrationTest
 @Slow
 @Test // see issue #5165 - this test fails each time without the fix in pull #5170
 public void testStressReaper() throws ExecutionException, InterruptedException {
-int num = atLeast(100);
+int num = randomIntBetween(100, 150);
 IndexRequestBuilder[] builders = new IndexRequestBuilder[num];
 for (int i = 0; i < builders.length; i++) {
 builders[i] = client().prepareIndex("test", "type", "" + i).setSource("f", English.intToEnglish(i));
@@ -56,7 +56,7 @@ public class StressSearchServiceReaperTest extends ElasticsearchIntegrationTest
 createIndex("test");
 indexRandom(true, builders);
 ensureYellow();
-final int iterations = atLeast(500);
+final int iterations = scaledRandomIntBetween(500, 1000);
 for (int i = 0; i < iterations; i++) {
 SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).setSize(num).get();
 assertNoFailures(searchResponse);

@@ -51,7 +51,7 @@ public class RandomTests extends ElasticsearchIntegrationTest {
 // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported
 // Duel with filters
 public void testRandomRanges() throws Exception {
-final int numDocs = atLeast(1000);
+final int numDocs = scaledRandomIntBetween(1000, 10000);
 final double[][] docs = new double[numDocs][];
 for (int i = 0; i < numDocs; ++i) {
 final int numValues = randomInt(5);
@@ -142,7 +142,7 @@ public class RandomTests extends ElasticsearchIntegrationTest {
 // test long/double/string terms aggs with high number of buckets that require array growth
 public void testDuelTerms() throws Exception {
 // These high numbers of docs and terms are important to trigger page recycling
-final int numDocs = atLeast(10000);
+final int numDocs = scaledRandomIntBetween(10000, 20000);
 final int maxNumTerms = randomIntBetween(10, 100000);
 final IntOpenHashSet valuesSet = new IntOpenHashSet();
@@ -221,7 +221,7 @@ public class RandomTests extends ElasticsearchIntegrationTest {
 public void testDuelTermsHistogram() throws Exception {
 createIndex("idx");
-final int numDocs = atLeast(1000);
+final int numDocs = scaledRandomIntBetween(1000, 5000);
 final int maxNumTerms = randomIntBetween(10, 2000);
 final int interval = randomIntBetween(1, 100);
@@ -267,7 +267,8 @@ public class RandomTests extends ElasticsearchIntegrationTest {
 // test high numbers of percentile buckets to make sure paging and release work correctly
 createIndex("idx");
-final int numDocs = atLeast(25000);
+final int numDocs = scaledRandomIntBetween(25000, 50000);
+logger.info("Indexing [" + numDocs +"] docs");
 int t = 0;
 for (int i = 0; i < numDocs; ) {
 BulkRequestBuilder request = client().prepareBulk();

@@ -37,7 +37,7 @@ public class GroupTreeTests extends ElasticsearchTestCase {
 GroupRedBlackTree tree2 = new GroupRedBlackTree(randomInt(100));
 // Add elements
-final int elements = atLeast(100);
+final int elements = scaledRandomIntBetween(100, 1000);
 for (int i = 0; i < elements; ++i) {
 final double centroid = randomDouble();
 final int count = randomIntBetween(1, 5);

@@ -106,7 +106,7 @@ public class RedBlackTreeTests extends ElasticsearchTestCase {
 public void testAdd() {
 Map<Integer, Integer> map = Maps.newHashMap();
 IntRedBlackTree tree = new IntRedBlackTree();
-final int iters = atLeast(1000);
+final int iters = scaledRandomIntBetween(1000, 10000);
 for (int i = 0; i < iters; ++i) {
 final int value = randomInt(200);
 final boolean added = tree.add(value);
@@ -133,7 +133,7 @@ public class RedBlackTreeTests extends ElasticsearchTestCase {
 }
 public void testRemove() {
-final int numValues = atLeast(200);
+final int numValues = scaledRandomIntBetween(200, 1000);
 final FixedBitSet values = new FixedBitSet(numValues);
 values.set(0, numValues);
 IntRedBlackTree tree = new IntRedBlackTree();
@@ -141,7 +141,7 @@ public class RedBlackTreeTests extends ElasticsearchTestCase {
 tree.add(i);
 }
-final int iters = atLeast(300);
+final int iters = scaledRandomIntBetween(300, 1000);
 for (int i = 0; i < iters; ++i) {
 final int value = randomInt(numValues - 1);
 final boolean removed = tree.remove(value);
@@ -165,7 +165,7 @@ public class RedBlackTreeTests extends ElasticsearchTestCase {
 public void testReverse() {
 IntRedBlackTree tree = new IntRedBlackTree();
-final int iters = atLeast(1000);
+final int iters = scaledRandomIntBetween(1000, 10000);
 for (int i = 0; i < iters; ++i) {
 final int value = randomInt(2000);
 tree.add(value);

@@ -33,7 +33,7 @@ public class HyperLogLogPlusPlusTests extends ElasticsearchTestCase {
 @Test
 public void encodeDecode() {
-final int iters = atLeast(100000);
+final int iters = scaledRandomIntBetween(100000, 500000);
 // random hashes
 for (int i = 0; i < iters; ++i) {
 final int p1 = randomIntBetween(4, 24);

@@ -72,11 +72,11 @@ public class SearchWhileRelocatingTests extends ElasticsearchIntegrationTest {
 }
 indexRandom(true, indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()]));
 assertHitCount(client().prepareSearch().get(), (long) (numDocs));
-final int numIters = atLeast(10);
+final int numIters = scaledRandomIntBetween(10, 20);
 for (int i = 0; i < numIters; i++) {
 final AtomicBoolean stop = new AtomicBoolean(false);
 final List<Throwable> thrownExceptions = new CopyOnWriteArrayList<Throwable>();
-Thread[] threads = new Thread[atLeast(1)];
+Thread[] threads = new Thread[scaledRandomIntBetween(1, 3)];
 for (int j = 0; j < threads.length; j++) {
 threads[j] = new Thread() {
 public void run() {

View File

@@ -126,12 +126,11 @@ public class SearchWithRandomExceptionsTests extends ElasticsearchIntegrationTest {
         RefreshResponse refreshResponse = client().admin().indices().prepareRefresh("test").execute().get(); // don't assert on failures here
         final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0;
         logger.info("Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", refreshFailed, refreshResponse.getFailedShards(), refreshResponse.getShardFailures().length, refreshResponse.getSuccessfulShards(), refreshResponse.getTotalShards());
-        final int numSearches = atLeast(10);
+        final int numSearches = scaledRandomIntBetween(10, 20);
         // we don't check anything here really just making sure we don't leave any open files or a broken index behind.
         for (int i = 0; i < numSearches; i++) {
             try {
                 int docToQuery = between(0, numDocs-1);
-                long expectedResults = added[docToQuery] ? 1 : 0;
                 logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery));
                 SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))).get();
                 logger.info("Successful shards: [{}] numShards: [{}]", searchResponse.getSuccessfulShards(), test.numPrimaries);
@@ -212,7 +211,7 @@ public class SearchWithRandomExceptionsTests extends ElasticsearchIntegrationTest {
         logger.info("Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", refreshFailed, refreshResponse.getFailedShards(), refreshResponse.getShardFailures().length, refreshResponse.getSuccessfulShards(), refreshResponse.getTotalShards());
         NumShards test = getNumShards("test");
-        final int numSearches = atLeast(100);
+        final int numSearches = scaledRandomIntBetween(100, 200);
         // we don't check anything here really just making sure we don't leave any open files or a broken index behind.
         for (int i = 0; i < numSearches; i++) {
             try {

View File

@@ -70,7 +70,7 @@ public class SimpleFacetsTests extends ElasticsearchIntegrationTest {
     protected int numberOfRuns() {
         if (numRuns == -1) {
-            numRuns = atLeast(3);
+            numRuns = scaledRandomIntBetween(3, 10);
         }
         return numRuns;
     }

View File

@@ -41,17 +41,17 @@ public class RandomScoreFunctionTests extends ElasticsearchIntegrationTest {
     public void consistentHitsWithSameSeed() throws Exception {
         createIndex("test");
         ensureGreen(); // make sure we are done otherwise preference could change?
-        int docCount = atLeast(100);
+        int docCount = randomIntBetween(100, 200);
         for (int i = 0; i < docCount; i++) {
             index("test", "type", "" + i, jsonBuilder().startObject().endObject());
         }
         flush();
         refresh();
-        int outerIters = atLeast(10);
+        int outerIters = scaledRandomIntBetween(10, 20);
         for (int o = 0; o < outerIters; o++) {
             final long seed = randomLong();
             final String preference = randomRealisticUnicodeOfLengthBetween(1, 10); // at least one char!!
-            int innerIters = atLeast(2);
+            int innerIters = scaledRandomIntBetween(2, 5);
             SearchHits hits = null;
             for (int i = 0; i < innerIters; i++) {
                 SearchResponse searchResponse = client().prepareSearch()

View File

@@ -2091,7 +2091,7 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest {
                 .setSource("field1", "The quick brown fox jumps over",
                         "field2", "The quick brown fox jumps over").get();
         refresh();
-        final int iters = atLeast(20);
+        final int iters = scaledRandomIntBetween(20, 30);
         for (int i = 0; i < iters; i++) {
             MultiMatchQueryBuilder.Type matchQueryType = rarely() ? null : RandomPicks.randomFrom(getRandom(), MultiMatchQueryBuilder.Type.values());
             final MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType);

View File

@@ -217,7 +217,7 @@ public class MatchedQueriesTests extends ElasticsearchIntegrationTest {
         refresh();
         // Execute search at least two times to load it in cache
-        int iter = atLeast(2);
+        int iter = scaledRandomIntBetween(2, 10);
         for (int i = 0; i < iter; i++) {
             SearchResponse searchResponse = client().prepareSearch()
                     .setQuery(

View File

@@ -64,7 +64,7 @@ public class MultiMatchQueryTests extends ElasticsearchIntegrationTest {
         );
         assertAcked(builder.addMapping("test", createMapping()));
         ensureGreen();
-        int numDocs = atLeast(50);
+        int numDocs = scaledRandomIntBetween(50, 100);
         List<IndexRequestBuilder> builders = new ArrayList<IndexRequestBuilder>();
         builders.add(client().prepareIndex("test", "test", "theone").setSource(
                 "full_name", "Captain America",
@@ -275,7 +275,7 @@ public class MultiMatchQueryTests extends ElasticsearchIntegrationTest {
         final int numDocs = (int) client().prepareCount("test")
                 .setQuery(matchAllQuery()).get().getCount();
-        int numIters = atLeast(5);
+        int numIters = scaledRandomIntBetween(5, 10);
         for (int i = 0; i < numIters; i++) {
             {
                 MatchQueryBuilder.Type type = randomBoolean() ? null : MatchQueryBuilder.Type.BOOLEAN;

View File

@@ -196,7 +196,7 @@ public class SimpleQueryTests extends ElasticsearchIntegrationTest {
             assertSearchHit(searchHit, hasScore(1.0f));
         }
-        int num = atLeast(100);
+        int num = scaledRandomIntBetween(100, 200);
         IndexRequestBuilder[] builders = new IndexRequestBuilder[num];
         for (int i = 0; i < builders.length; i++) {
             builders[i] = client().prepareIndex("test", "type", "" + i).setSource("f", English.intToEnglish(i));
@@ -204,7 +204,7 @@ public class SimpleQueryTests extends ElasticsearchIntegrationTest {
         createIndex("test_1");
         indexRandom(true, builders);
         ensureYellow();
-        int queryRounds = atLeast(10);
+        int queryRounds = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < queryRounds; i++) {
             MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num)));
             searchResponse = client().prepareSearch("test_1").setQuery(matchQuery).setSize(num).get();
@@ -241,7 +241,7 @@ public class SimpleQueryTests extends ElasticsearchIntegrationTest {
         indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("foo", "bar"),
                 client().prepareIndex("test", "type1", "2").setSource("foo", "bar")
         );
-        int iters = atLeast(100);
+        int iters = scaledRandomIntBetween(100, 200);
         for (int i = 0; i < iters; i++) {
             SearchResponse searchResponse = client().prepareSearch("test").setQuery(queryString("*:*^10.0").boost(10.0f)).get();
             assertHitCount(searchResponse, 2l);
@@ -356,7 +356,7 @@ public class SimpleQueryTests extends ElasticsearchIntegrationTest {
     @Test
     public void testOmitTermFreqsAndPositions() throws Exception {
         Version version = Version.CURRENT;
-        int iters = atLeast(10);
+        int iters = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < iters; i++) {
             try {
                 // backwards compat test!

View File

@@ -55,7 +55,7 @@ public class QueryRescorerTests extends ElasticsearchIntegrationTest {
     public void testEnforceWindowSize() {
         createIndex("test");
         // this
-        int iters = atLeast(10);
+        int iters = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < iters; i ++) {
             client().prepareIndex("test", "type", Integer.toString(i)).setSource("f", Integer.toString(i)).execute().actionGet();
         }
@@ -236,7 +236,7 @@ public class QueryRescorerTests extends ElasticsearchIntegrationTest {
     public void testEquivalence() throws Exception {
         int numDocs = indexRandomNumbers("whitespace");
-        final int iters = atLeast(50);
+        final int iters = scaledRandomIntBetween(50, 100);
         for (int i = 0; i < iters; i++) {
             int resultSize = between(5, 30);
             int rescoreWindow = between(1, 3) * resultSize;
@@ -542,7 +542,7 @@ public class QueryRescorerTests extends ElasticsearchIntegrationTest {
                 jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("field1")
                         .field("analyzer", analyzer).field("type", "string").endObject().endObject().endObject().endObject())
                 .setSettings(builder));
-        int numDocs = atLeast(100);
+        int numDocs = randomIntBetween(100, 150);
         IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
         for (int i = 0; i < numDocs; i++) {
             docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i));

View File

@@ -38,7 +38,7 @@ public class SearchScanScrollingTests extends ElasticsearchIntegrationTest {
     @Test
     public void testRandomized() throws Exception {
-        testScroll(atLeast(100), between(1, 300), getRandom().nextBoolean(), getRandom().nextBoolean());
+        testScroll(scaledRandomIntBetween(100, 200), between(1, 300), getRandom().nextBoolean(), getRandom().nextBoolean());
     }
     private void testScroll(long numberOfDocs, int size, boolean unbalanced, boolean trackScores) throws Exception {

View File

@@ -45,7 +45,7 @@ public class SearchScanTests extends ElasticsearchIntegrationTest {
         Set<String> ids = Sets.newHashSet();
         Set<String> expectedIds = Sets.newHashSet();
-        IndexRequestBuilder[] builders = new IndexRequestBuilder[atLeast(50)];
+        IndexRequestBuilder[] builders = new IndexRequestBuilder[scaledRandomIntBetween(50, 100)];
         for (int i = 0; i < builders.length/2; i++) {
             expectedIds.add(Integer.toString(i));
             builders[i] = client().prepareIndex("test", "tweet", Integer.toString(i)).setSource(

View File

@@ -33,9 +33,7 @@ import java.util.concurrent.ExecutionException;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
 import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
 public class SimpleSearchTests extends ElasticsearchIntegrationTest {
@@ -66,7 +64,7 @@ public class SimpleSearchTests extends ElasticsearchIntegrationTest {
                 client().prepareIndex("test", "type", "5").setSource("field", "value"),
                 client().prepareIndex("test", "type", "6").setSource("field", "value"));
-        int iters = atLeast(10);
+        int iters = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < iters; i++) {
             // id is not indexed, but lets see that we automatically convert to
             SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomUnicodeOfLengthBetween(0, 4)).get();

View File

@@ -122,7 +122,7 @@ public class SimpleSortTests extends ElasticsearchIntegrationTest {
         TreeMap<BytesRef, String> sparseBytes = new TreeMap<BytesRef, String>();
         TreeMap<BytesRef, String> denseBytes = new TreeMap<BytesRef, String>();
-        int numDocs = atLeast(200);
+        int numDocs = randomIntBetween(200, 300);
         IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
         for (int i = 0; i < numDocs; i++) {
             String docId = Integer.toString(i);
@@ -1458,7 +1458,7 @@ public class SimpleSortTests extends ElasticsearchIntegrationTest {
                 .startObject("_timestamp").field("enabled", true).field("store", true).field("index", !timestampDocValues || randomBoolean() ? "not_analyzed" : "no").startObject("fielddata").field("format", timestampDocValues ? "doc_values" : null).endObject().endObject()
                 .endObject().endObject()));
         ensureGreen();
-        final int numDocs = atLeast(10);
+        final int numDocs = randomIntBetween(10, 20);
         IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs];
         for (int i = 0; i < numDocs; ++i) {
             indexReqs[i] = client().prepareIndex("test", "typ", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000))).setSource();

View File

@@ -977,10 +977,10 @@ public class CompletionSuggestSearchTests extends ElasticsearchIntegrationTest {
     @Test
     public void testMaxFieldLength() throws IOException {
         client().admin().indices().prepareCreate(INDEX).get();
-        int iters = atLeast(10);
+        int iters = scaledRandomIntBetween(10, 20);
         for (int i = 0; i < iters; i++) {
             int maxInputLen = between(3, 50);
-            String str = replaceReservedChars(randomRealisticUnicodeOfCodepointLengthBetween(maxInputLen + 1, atLeast(maxInputLen + 2)), (char) 0x01);
+            String str = replaceReservedChars(randomRealisticUnicodeOfCodepointLengthBetween(maxInputLen + 1, maxInputLen + scaledRandomIntBetween(2, 50)), (char) 0x01);
             ElasticsearchAssertions.assertAcked(client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject()
                     .startObject(TYPE).startObject("properties")
                     .startObject(FIELD)
@@ -1025,7 +1025,7 @@ public class CompletionSuggestSearchTests extends ElasticsearchIntegrationTest {
                 .endObject()));
         ensureYellow();
         // can cause stack overflow without the default max_input_length
-        String longString = replaceReservedChars(randomRealisticUnicodeOfLength(atLeast(5000)), (char) 0x01);
+        String longString = replaceReservedChars(randomRealisticUnicodeOfLength(randomIntBetween(5000, 10000)), (char) 0x01);
         client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder()
                 .startObject().startObject(FIELD)
                 .startArray("input").value(longString).endArray()

View File

@@ -128,7 +128,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase {
         XAnalyzingSuggester reference = new XAnalyzingSuggester(new StandardAnalyzer(TEST_VERSION_CURRENT), null, new StandardAnalyzer(
                 TEST_VERSION_CURRENT), options, 256, -1, preservePositionIncrements, null, false, 1, XAnalyzingSuggester.SEP_LABEL, XAnalyzingSuggester.PAYLOAD_SEP, XAnalyzingSuggester.END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER);
         LineFileDocs docs = new LineFileDocs(getRandom());
-        int num = atLeast(150);
+        int num = scaledRandomIntBetween(150, 300);
         final String[] titles = new String[num];
         final long[] weights = new long[num];
         for (int i = 0; i < titles.length; i++) {

View File

@@ -19,6 +19,7 @@
 package org.elasticsearch.test;

+import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.*;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import org.apache.lucene.codecs.Codec;
@@ -48,4 +49,8 @@ public abstract class ElasticsearchLuceneTestCase extends LuceneTestCase {
     public static void forceDefaultCodec() {
         Codec.setDefault(DEFAULT_CODEC);
     }
+
+    public static int scaledRandomIntBetween(int min, int max) {
+        return RandomizedTest.scaledRandomIntBetween(min, max);
+    }
 }
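
For reference, a minimal usage sketch of the helper added above (illustrative only, not part of this commit; the class and test names are made up). It assumes the randomizedtesting 2.1.1 behaviour that scaledRandomIntBetween always stays within [min, max], biased toward the lower bound at the default multiplier and toward the upper bound as the tests.multiplier system property grows:

import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.junit.Test;
import org.junit.runner.RunWith;

// Hypothetical test class, not part of this commit.
@RunWith(RandomizedRunner.class)
public class ScaledIterationsExample {

    @Test
    public void boundedIterations() {
        // Always within [100, 1000]: near 100 on a default run, closer to 1000 on
        // nightly/multiplied runs (assumed 2.1.1 semantics).
        final int iters = RandomizedTest.scaledRandomIntBetween(100, 1000);
        for (int i = 0; i < iters; i++) {
            // exercise the randomized behaviour under test here
        }
    }
}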

View File

@@ -115,7 +115,7 @@ public class ReproduceInfoPrinter extends RunListener {
     public ReproduceErrorMessageBuilder appendESProperties() {
         appendProperties("es.logger.level", "es.node.mode", "es.node.local", TestCluster.TESTS_ENABLE_MOCK_MODULES,
-                "tests.assertion.disabled", "tests.security.manager");
+                "tests.assertion.disabled", "tests.security.manager", "tests.nightly");
         if (System.getProperty("tests.jvm.argline") != null && !System.getProperty("tests.jvm.argline").isEmpty()) {
             appendOpt("tests.jvm.argline", "\"" + System.getProperty("tests.jvm.argline") + "\"");
         }
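
For context, appendOpt is the builder call that adds a single -Dkey=value pair to the "REPRODUCE WITH" command line; a rough sketch of what appendProperties is assumed to do with the list above (reconstructed from the calls visible in this hunk, not quoted from the source):

// Assumed shape of the helper: forward each listed system property, when set,
// onto the reproduce line so a failing seed can be rerun with the same settings
// (including the newly appended tests.nightly).
protected ReproduceErrorMessageBuilder appendProperties(String... properties) {
    for (String sysPropName : properties) {
        if (System.getProperty(sysPropName) != null) {
            appendOpt(sysPropName, System.getProperty(sysPropName));
        }
    }
    return this;
}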

View File

@@ -76,7 +76,7 @@ public class KeyedLockTests extends ElasticsearchTestCase {
         KeyedLock<String> connectionLock = new KeyedLock<String>();
         String[] names = new String[randomIntBetween(1, 40)];
         connectionLock = new KeyedLock<String>();
-        String name = randomRealisticUnicodeOfLength(atLeast(10));
+        String name = randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50));
         connectionLock.acquire(name);
         connectionLock.acquire(name);
     }
@@ -88,7 +88,7 @@ public class KeyedLockTests extends ElasticsearchTestCase {
         KeyedLock<String> connectionLock = new KeyedLock<String>();
         String[] names = new String[randomIntBetween(1, 40)];
         connectionLock = new KeyedLock<String>();
-        String name = randomRealisticUnicodeOfLength(atLeast(10));
+        String name = randomRealisticUnicodeOfLength(scaledRandomIntBetween(10, 50));
         connectionLock.release(name);
     }
@@ -114,7 +114,7 @@ public class KeyedLockTests extends ElasticsearchTestCase {
             } catch (InterruptedException e) {
                 throw new RuntimeException();
             }
-            int numRuns = atLeast(500);
+            int numRuns = scaledRandomIntBetween(500, 5000);
             for (int i = 0; i < numRuns; i++) {
                 String curName = names[randomInt(names.length - 1)];
                 connectionLock.acquire(curName);
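
For context, a short sketch of the per-key locking pattern these tests hammer concurrently (the acquire/release signatures mirror the test code above; the surrounding method and key name are illustrative):

// Threads contending on the same key serialize; threads using different keys
// proceed independently. Each acquire must be paired with a release on the key.
KeyedLock<String> connectionLock = new KeyedLock<String>();

void withConnection(String nodeName) { // hypothetical caller and key
    connectionLock.acquire(nodeName);
    try {
        // critical section guarded for this key only
    } finally {
        connectionLock.release(nodeName);
    }
}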