Merge pull request #17394 from camilojd/refactor/replace-getrandom
Refactor: replace all occurrences of ESTestCase.getRandom() with LuceneTestCase.random()
commit 1c16d63a9a
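The refactor is mechanical: every call to the getRandom() helper is replaced with the static LuceneTestCase.random(). Both are expected to hand back the per-test, seed-reproducible Random provided by the randomized testing framework, so test behaviour should be unchanged. A minimal sketch of the pattern (a hypothetical test, not part of this change, assuming the Lucene test framework / randomizedtesting is on the classpath):

    import org.apache.lucene.util.LuceneTestCase;

    // Hypothetical example, not from this PR: illustrates the getRandom() -> random() swap.
    public class RandomSourceExampleTests extends LuceneTestCase {

        public void testUsesSharedRandomSource() {
            byte[] bytes = new byte[16];

            // Before this change, Elasticsearch tests typically wrote:
            //     getRandom().nextBytes(bytes);
            // Afterwards the static LuceneTestCase.random() is used instead; it
            // returns the per-test Random that is reproducible from the test seed.
            random().nextBytes(bytes);

            assertEquals(16, bytes.length);
        }
    }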
@@ -44,7 +44,7 @@ public class MinDocQueryTests extends ESTestCase {
 final int numDocs = randomIntBetween(10, 200);
 final Document doc = new Document();
 final Directory dir = newDirectory();
-final RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
+final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
 for (int i = 0; i < numDocs; ++i) {
 w.addDocument(doc);
 }

@@ -339,9 +339,9 @@ public class ESExceptionTests extends ESTestCase {
 }
 assertArrayEquals(e.getStackTrace(), ex.getStackTrace());
 assertTrue(e.getStackTrace().length > 1);
-ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), t);
-ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), ex);
-ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(getRandom()), e);
+ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), t);
+ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), ex);
+ElasticsearchAssertions.assertVersionSerializable(VersionUtils.randomVersion(random()), e);
 }
 }
 }

@@ -203,7 +203,7 @@ public class BulkProcessorIT extends ESIntegTestCase {
 //let's make sure that the bulk action limit trips, one single execution will index all the documents
 .setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
 .setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(randomIntBetween(1, 10),
-RandomPicks.randomFrom(getRandom(), ByteSizeUnit.values())))
+RandomPicks.randomFrom(random(), ByteSizeUnit.values())))
 .build();

 MultiGetRequestBuilder multiGetRequestBuilder = indexDocs(client(), processor, numDocs);

@@ -26,7 +26,7 @@ import static org.apache.lucene.util.TestUtil.randomSimpleString;

 public class BulkShardRequestTests extends ESTestCase {
 public void testToString() {
-String index = randomSimpleString(getRandom(), 10);
+String index = randomSimpleString(random(), 10);
 int count = between(1, 100);
 BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), false, new BulkItemRequest[count]);
 assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests", r.toString());
@@ -70,7 +70,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
 // cause differences when the random string generated contains these complex characters. To mitigate
 // the problem, we skip any strings containing these characters.
 // TODO: only skip strings containing complex chars when comparing against ES <= 1.3.x
-input = TestUtil.randomAnalysisString(getRandom(), 100, false);
+input = TestUtil.randomAnalysisString(random(), 100, false);
 matcher = complexUnicodeChars.matcher(input);
 } while (matcher.find());

@@ -104,7 +104,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
 }

 private String randomAnalyzer() {
-PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(getRandom(), PreBuiltAnalyzers.values());
+PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values());
 return preBuiltAnalyzers.name().toLowerCase(Locale.ROOT);
 }

@@ -319,7 +319,7 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase {
 IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
 String[] indexForDoc = new String[docs.length];
 for (int i = 0; i < numDocs; i++) {
-docs[i] = client().prepareIndex(indexForDoc[i] = RandomPicks.randomFrom(getRandom(), indices), "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble());
+docs[i] = client().prepareIndex(indexForDoc[i] = RandomPicks.randomFrom(random(), indices), "type1", String.valueOf(i)).setSource("field1", English.intToEnglish(i), "num_int", randomInt(), "num_double", randomDouble());
 }
 indexRandom(true, docs);
 for (String index : indices) {

@@ -63,7 +63,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {

 TestIteration() {
 threadPool = new ThreadPool("transport-client-nodes-service-tests");
-transport = new FailAndRetryMockTransport<TestResponse>(getRandom()) {
+transport = new FailAndRetryMockTransport<TestResponse>(random()) {
 @Override
 public List<String> getLocalAddresses() {
 return Collections.emptyList();

@@ -492,7 +492,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
 public IndexMetaData randomCreate(String name) {
 IndexMetaData.Builder builder = IndexMetaData.builder(name);
 Settings.Builder settingsBuilder = Settings.builder();
-setRandomIndexSettings(getRandom(), settingsBuilder);
+setRandomIndexSettings(random(), settingsBuilder);
 settingsBuilder.put(randomSettings(Settings.EMPTY)).put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion(random()));
 builder.settings(settingsBuilder);
 builder.numberOfShards(randomIntBetween(1, 10)).numberOfReplicas(randomInt(10));

@@ -672,6 +672,6 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
 * Generates a random name that starts with the given prefix
 */
 private String randomName(String prefix) {
-return prefix + Strings.randomBase64UUID(getRandom());
+return prefix + Strings.randomBase64UUID(random());
 }
 }
@@ -72,7 +72,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
 }

 public void testSerialization() throws Exception {
-UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null);
+UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null);
 BytesStreamOutput out = new BytesStreamOutput();
 meta.writeTo(out);
 out.close();

@@ -273,7 +273,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
 public void testUnassignedDelayOnlyNodeLeftNonNodeLeftReason() throws Exception {
 EnumSet<UnassignedInfo.Reason> reasons = EnumSet.allOf(UnassignedInfo.Reason.class);
 reasons.remove(UnassignedInfo.Reason.NODE_LEFT);
-UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
+UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(random(), reasons), null);
 long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
 Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY);
 assertThat(delay, equalTo(0L));

@@ -313,7 +313,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase {
 public void testNoRebalanceOnPrimaryOverload() {
 Settings.Builder settings = settingsBuilder();
 AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(),
-new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
+new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()),
 NoopGatewayAllocator.INSTANCE, new ShardsAllocator() {

 public Map<DiscoveryNode, Float> weighShard(RoutingAllocation allocation, ShardRouting shard) {

@@ -56,7 +56,7 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase {
 * already allocated on a node and balances the cluster to gain optimal
 * balance.*/
 public void testRandomDecisions() {
-RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(getRandom());
+RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(random());
 AllocationService strategy = new AllocationService(settingsBuilder().build(), new AllocationDeciders(Settings.EMPTY,
 new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY), new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY),
 randomAllocationDecider))), NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE);

@@ -155,14 +155,14 @@ public class EnableAllocationTests extends ESAllocationTestCase {

 public void testEnableClusterBalance() {
 final boolean useClusterSetting = randomBoolean();
-final Rebalance allowedOnes = RandomPicks.randomFrom(getRandom(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL));
+final Rebalance allowedOnes = RandomPicks.randomFrom(random(), EnumSet.of(Rebalance.PRIMARIES, Rebalance.REPLICAS, Rebalance.ALL));
 Settings build = settingsBuilder()
-.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings
+.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(random(), Rebalance.values())) // index settings override cluster settings
 .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3)
 .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 10)
 .build();
 ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-AllocationService strategy = createAllocationService(build, clusterSettings, getRandom());
+AllocationService strategy = createAllocationService(build, clusterSettings, random());
 Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build();

 logger.info("Building initial routing table");

@@ -260,11 +260,11 @@ public class EnableAllocationTests extends ESAllocationTestCase {
 public void testEnableClusterBalanceNoReplicas() {
 final boolean useClusterSetting = randomBoolean();
 Settings build = settingsBuilder()
-.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(getRandom(), Rebalance.values())) // index settings override cluster settings
+.put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), useClusterSetting ? Rebalance.NONE: RandomPicks.randomFrom(random(), Rebalance.values())) // index settings override cluster settings
 .put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), 3)
 .build();
 ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-AllocationService strategy = createAllocationService(build, clusterSettings, getRandom());
+AllocationService strategy = createAllocationService(build, clusterSettings, random());
 Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build();

 logger.info("Building initial routing table");
@@ -30,10 +30,10 @@ public class BytesReferenceTests extends ESTestCase {
 final int len = randomIntBetween(0, randomBoolean() ? 10: 100000);
 final int offset1 = randomInt(5);
 final byte[] array1 = new byte[offset1 + len + randomInt(5)];
-getRandom().nextBytes(array1);
+random().nextBytes(array1);
 final int offset2 = randomInt(offset1);
 final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length);

 final BytesArray b1 = new BytesArray(array1, offset1, len);
 final BytesArray b2 = new BytesArray(array2, offset2, len);
 assertTrue(BytesReference.Helper.bytesEqual(b1, b2));

@@ -115,7 +115,7 @@ public class PagedBytesReferenceTests extends ESTestCase {

 // buffer for bulk reads
 byte[] origBuf = new byte[length];
-getRandom().nextBytes(origBuf);
+random().nextBytes(origBuf);
 byte[] targetBuf = Arrays.copyOf(origBuf, origBuf.length);

 // bulk-read 0 bytes: must not modify buffer

@@ -172,7 +172,7 @@ public class PagedBytesReferenceTests extends ESTestCase {
 byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length);
 // randomized target buffer to ensure no stale slots
 byte[] targetBytes = new byte[pbrBytesWithOffset.length];
-getRandom().nextBytes(targetBytes);
+random().nextBytes(targetBytes);

 // bulk-read all
 si.readFully(targetBytes);

@@ -574,7 +574,7 @@ public class PagedBytesReferenceTests extends ESTestCase {
 ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
 try {
 for (int i = 0; i < length; i++) {
-out.writeByte((byte) getRandom().nextInt(1 << 8));
+out.writeByte((byte) random().nextInt(1 << 8));
 }
 } catch (IOException e) {
 fail("should not happen " + e.getMessage());
@@ -46,7 +46,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRandom() throws IOException {
-Random r = getRandom();
+Random r = random();
 for (int i = 0; i < 10; i++) {
 byte bytes[] = new byte[TestUtil.nextInt(r, 1, 100000)];
 r.nextBytes(bytes);

@@ -55,7 +55,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRandomThreads() throws Exception {
-final Random r = getRandom();
+final Random r = random();
 int threadCount = TestUtil.nextInt(r, 2, 6);
 Thread[] threads = new Thread[threadCount];
 final CountDownLatch startingGun = new CountDownLatch(1);

@@ -86,7 +86,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testLineDocs() throws IOException {
-Random r = getRandom();
+Random r = random();
 LineFileDocs lineFileDocs = new LineFileDocs(r);
 for (int i = 0; i < 10; i++) {
 int numDocs = TestUtil.nextInt(r, 1, 200);

@@ -101,7 +101,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testLineDocsThreads() throws Exception {
-final Random r = getRandom();
+final Random r = random();
 int threadCount = TestUtil.nextInt(r, 2, 6);
 Thread[] threads = new Thread[threadCount];
 final CountDownLatch startingGun = new CountDownLatch(1);

@@ -138,7 +138,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsL() throws IOException {
-Random r = getRandom();
+Random r = random();
 for (int i = 0; i < 10; i++) {
 int numLongs = TestUtil.nextInt(r, 1, 10000);
 ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -161,7 +161,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsLThreads() throws Exception {
-final Random r = getRandom();
+final Random r = random();
 int threadCount = TestUtil.nextInt(r, 2, 6);
 Thread[] threads = new Thread[threadCount];
 final CountDownLatch startingGun = new CountDownLatch(1);

@@ -206,7 +206,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsI() throws IOException {
-Random r = getRandom();
+Random r = random();
 for (int i = 0; i < 10; i++) {
 int numInts = TestUtil.nextInt(r, 1, 20000);
 ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -225,7 +225,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsIThreads() throws Exception {
-final Random r = getRandom();
+final Random r = random();
 int threadCount = TestUtil.nextInt(r, 2, 6);
 Thread[] threads = new Thread[threadCount];
 final CountDownLatch startingGun = new CountDownLatch(1);

@@ -266,7 +266,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsS() throws IOException {
-Random r = getRandom();
+Random r = random();
 for (int i = 0; i < 10; i++) {
 int numShorts = TestUtil.nextInt(r, 1, 40000);
 ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -283,7 +283,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testMixed() throws IOException {
-Random r = getRandom();
+Random r = random();
 LineFileDocs lineFileDocs = new LineFileDocs(r);
 for (int i = 0; i < 2; ++i) {
 ByteArrayOutputStream bos = new ByteArrayOutputStream();

@@ -349,7 +349,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 }

 public void testRepetitionsSThreads() throws Exception {
-final Random r = getRandom();
+final Random r = random();
 int threadCount = TestUtil.nextInt(r, 2, 6);
 Thread[] threads = new Thread[threadCount];
 final CountDownLatch startingGun = new CountDownLatch(1);

@@ -396,8 +396,8 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 OutputStreamStreamOutput rawOs = new OutputStreamStreamOutput(bos);
 StreamOutput os = c.streamOutput(rawOs);

-Random r = getRandom();
-int bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(getRandom(), 1, 70000);
+Random r = random();
+int bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(random(), 1, 70000);
 int prepadding = r.nextInt(70000);
 int postpadding = r.nextInt(70000);
 byte buffer[] = new byte[prepadding + bufferSize + postpadding];

@@ -417,7 +417,7 @@ public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
 StreamInput in = c.streamInput(compressedIn);

 // randomize constants again
-bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(getRandom(), 1, 70000);
+bufferSize = r.nextBoolean() ? 65535 : TestUtil.nextInt(random(), 1, 70000);
 prepadding = r.nextInt(70000);
 postpadding = r.nextInt(70000);
 buffer = new byte[prepadding + bufferSize + postpadding];
@@ -72,7 +72,7 @@ public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
 Compressor defaultCompressor = CompressorFactory.defaultCompressor();
 try {
 CompressorFactory.setDefaultCompressor(compressor);
-Random r = getRandom();
+Random r = random();
 for (int i = 0; i < 1000; i++) {
 String string = TestUtil.randomUnicodeString(r, 10000);
 // hack to make it detected as YAML

@@ -72,7 +72,7 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase<EnvelopeB
 }

 static EnvelopeBuilder createRandomShape() {
-Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom()));
+Rectangle box = RandomShapeGenerator.xRandomRectangle(random(), RandomShapeGenerator.xRandomPoint(random()));
 EnvelopeBuilder envelope = new EnvelopeBuilder(new Coordinate(box.getMinX(), box.getMaxY()),
 new Coordinate(box.getMaxX(), box.getMinY()));
 return envelope;

@@ -100,7 +100,7 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
 }
 mutation.shapes.set(shapePosition, shapeToChange);
 } else {
-mutation.shape(RandomShapeGenerator.createShape(getRandom()));
+mutation.shape(RandomShapeGenerator.createShape(random()));
 }
 return mutation;
 }

@@ -69,7 +69,7 @@ public class LineStringBuilderTests extends AbstractShapeBuilderTestCase<LineStr
 }

 static LineStringBuilder createRandomShape() {
-LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING);
+LineStringBuilder lsb = (LineStringBuilder) RandomShapeGenerator.createShape(random(), ShapeType.LINESTRING);
 if (randomBoolean()) {
 lsb.close();
 }

@@ -62,7 +62,7 @@ public class MultiLineStringBuilderTests extends AbstractShapeBuilderTestCase<Mu
 }
 }
 } else {
-mutation.linestring((LineStringBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.LINESTRING));
+mutation.linestring((LineStringBuilder) RandomShapeGenerator.createShape(random(), ShapeType.LINESTRING));
 }
 return mutation;
 }

@@ -74,6 +74,6 @@ public class MultiPointBuilderTests extends AbstractShapeBuilderTestCase<MultiPo
 }

 static MultiPointBuilder createRandomShape() {
-return (MultiPointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.MULTIPOINT);
+return (MultiPointBuilder) RandomShapeGenerator.createShape(random(), ShapeType.MULTIPOINT);
 }
 }

@@ -50,7 +50,7 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase<Multi
 int polyToChange = randomInt(mutation.polygons().size() - 1);
 mutation.polygons().set(polyToChange, PolygonBuilderTests.mutatePolygonBuilder(mutation.polygons().get(polyToChange)));
 } else {
-mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON));
+mutation.polygon((PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON));
 }
 }
 return mutation;

@@ -60,7 +60,7 @@ public class MultiPolygonBuilderTests extends AbstractShapeBuilderTestCase<Multi
 MultiPolygonBuilder mpb = new MultiPolygonBuilder(randomFrom(Orientation.values()));
 int polys = randomIntBetween(0, 10);
 for (int i = 0; i < polys; i++) {
-PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON);
+PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON);
 mpb.polygon(pgb);
 }
 return mpb;

@@ -42,7 +42,7 @@ public class PointBuilderTests extends AbstractShapeBuilderTestCase<PointBuilder
 }

 static PointBuilder createRandomShape() {
-return (PointBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POINT);
+return (PointBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POINT);
 }

@@ -89,7 +89,7 @@ public class PolygonBuilderTests extends AbstractShapeBuilderTestCase<PolygonBui
 }

 static PolygonBuilder createRandomShape() {
-PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(getRandom(), ShapeType.POLYGON);
+PolygonBuilder pgb = (PolygonBuilder) RandomShapeGenerator.createShape(random(), ShapeType.POLYGON);
 if (randomBoolean()) {
 pgb = polyWithOposingOrientation(pgb);
 }
@@ -495,7 +495,7 @@ public class BytesStreamsTests extends ESTestCase {
 // create & fill byte[] with randomized data
 protected byte[] randomizedByteArrayWithSize(int size) {
 byte[] data = new byte[size];
-getRandom().nextBytes(data);
+random().nextBytes(data);
 return data;
 }

@@ -324,7 +324,7 @@ public class LuceneTests extends ESTestCase {

 public void testCount() throws Exception {
 Directory dir = newDirectory();
-RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir);
+RandomIndexWriter w = new RandomIndexWriter(random(), dir);

 try (DirectoryReader reader = w.getReader()) {
 // match_all does not match anything on an empty index

@@ -106,7 +106,7 @@ public abstract class AbstractRecyclerTestCase extends ESTestCase {
 Recycler<byte[]> r = newRecycler(limit);
 Recycler.V<byte[]> o = r.obtain();
 assertFresh(o.v());
-getRandom().nextBytes(o.v());
+random().nextBytes(o.v());
 o.close();
 o = r.obtain();
 assertRecycled(o.v());

@@ -166,7 +166,7 @@ public abstract class AbstractRecyclerTestCase extends ESTestCase {
 assertFresh(data);

 // randomize & return to pool
-getRandom().nextBytes(data);
+random().nextBytes(data);
 o.close();

 // verify that recycle() ran

@@ -31,7 +31,7 @@ public class RegexTests extends ESTestCase {
 "LITERAL", "COMMENTS", "UNICODE_CHAR_CLASS", "UNICODE_CHARACTER_CLASS"};
 int[] flags = new int[]{Pattern.CASE_INSENSITIVE, Pattern.MULTILINE, Pattern.DOTALL, Pattern.UNICODE_CASE, Pattern.CANON_EQ,
 Pattern.UNIX_LINES, Pattern.LITERAL, Pattern.COMMENTS, Regex.UNICODE_CHARACTER_CLASS};
-Random random = getRandom();
+Random random = random();
 int num = 10 + random.nextInt(100);
 for (int i = 0; i < num; i++) {
 int numFlags = random.nextInt(flags.length + 1);

@@ -63,4 +63,4 @@ public class RegexTests extends ESTestCase {
 assertTrue(Regex.simpleMatch("fff*******ddd", "fffabcddd"));
 assertFalse(Regex.simpleMatch("fff******ddd", "fffabcdd"));
 }
-}
+}

@@ -235,7 +235,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

 public void testByteArrayBulkGet() {
 final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
-getRandom().nextBytes(array1);
+random().nextBytes(array1);
 final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
 for (int i = 0; i < array1.length; ++i) {
 array2.set(i, array1[i]);

@@ -252,7 +252,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

 public void testByteArrayBulkSet() {
 final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
-getRandom().nextBytes(array1);
+random().nextBytes(array1);
 final ByteArray array2 = bigArrays.newByteArray(array1.length, randomBoolean());
 for (int i = 0; i < array1.length; ) {
 final int len = Math.min(array1.length - i, randomBoolean() ? randomInt(10) : randomInt(3 * BigArrays.BYTE_PAGE_SIZE));

@@ -315,7 +315,7 @@ public class BigArraysTests extends ESSingleNodeTestCase {

 // large arrays should be different
 final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
-getRandom().nextBytes(array1);
+random().nextBytes(array1);
 final int array1Hash = Arrays.hashCode(array1);
 final ByteArray array2 = byteArrayWithBytes(array1);
 final int array2Hash = bigArrays.hashCode(array2);
@@ -110,7 +110,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
 for (int i = 0; i < 797; i++) {
 String str;
 do {
-str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
+str = TestUtil.randomRealisticUnicodeString(random(), 1000);
 } while (str.length() == 0);
 ref.copyChars(str);
 long count = hash.size();

@@ -142,7 +142,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
 for (int i = 0; i < 797; i++) {
 String str;
 do {
-str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
+str = TestUtil.randomRealisticUnicodeString(random(), 1000);
 } while (str.length() == 0);
 ref.copyChars(str);
 long count = hash.size();

@@ -181,7 +181,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
 for (int i = 0; i < 797; i++) {
 String str;
 do {
-str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
+str = TestUtil.randomRealisticUnicodeString(random(), 1000);
 } while (str.length() == 0);
 ref.copyChars(str);
 long count = hash.size();

@@ -216,7 +216,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase {
 for (int i = 0; i < 797; i++) {
 String str;
 do {
-str = TestUtil.randomRealisticUnicodeString(getRandom(), 1000);
+str = TestUtil.randomRealisticUnicodeString(random(), 1000);
 } while (str.length() == 0);
 ref.copyChars(str);
 long count = hash.size();
@@ -218,7 +218,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

 // Simulate a network issue between the unlucky node and elected master node in both directions.

-NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, unluckyNode, getRandom());
+NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, unluckyNode, random());
 setDisruptionScheme(networkDisconnect);
 networkDisconnect.startDisrupting();

@@ -562,7 +562,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

 String oldMasterNode = internalCluster().getMasterName();
 // a very long GC, but it's OK as we remove the disruption when it has had an effect
-SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(oldMasterNode, getRandom(), 100, 200, 30000, 60000);
+SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(oldMasterNode, random(), 100, 200, 30000, 60000);
 internalCluster().setDisruptionScheme(masterNodeDisruption);
 masterNodeDisruption.startDisrupting();

@@ -609,7 +609,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 assertMaster(oldMasterNode, nodes);

 // Simulating a painful gc by suspending all threads for a long time on the current elected master node.
-SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(getRandom(), oldMasterNode);
+SingleNodeDisruption masterNodeDisruption = new LongGCDisruption(random(), oldMasterNode);

 // Save the majority side
 final List<String> majoritySide = new ArrayList<>(nodes);

@@ -779,7 +779,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 }

 // Simulate a network issue between the unlucky node and elected master node in both directions.
-NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, isolatedNode, getRandom());
+NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(masterNode, isolatedNode, random());
 setDisruptionScheme(networkDisconnect);
 networkDisconnect.startDisrupting();
 // Wait until elected master has removed that the unlucky node...

@@ -816,7 +816,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 }

 // Simulate a network issue between the unicast target node and the rest of the cluster
-NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(unicastTargetSide, restOfClusterSide, getRandom());
+NetworkDisconnectPartition networkDisconnect = new NetworkDisconnectPartition(unicastTargetSide, restOfClusterSide, random());
 setDisruptionScheme(networkDisconnect);
 networkDisconnect.startDisrupting();
 // Wait until elected master has removed that the unlucky node...

@@ -955,7 +955,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 public void testClusterFormingWithASlowNode() throws Exception {
 configureUnicastCluster(3, null, 2);

-SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(getRandom(), 0, 0, 1000, 2000);
+SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(random(), 0, 0, 1000, 2000);

 // don't wait for initial state, wat want to add the disruption while the cluster is forming..
 internalCluster().startNodesAsync(3,

@@ -1035,7 +1035,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 indexRequestBuilderList.add(client().prepareIndex().setIndex("test").setType("doc").setSource("{\"int_field\":1}"));
 }
 indexRandom(true, indexRequestBuilderList);
-SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_2, getRandom());
+SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_2, random());

 internalCluster().setDisruptionScheme(disruption);
 MockTransportService transportServiceNode2 = (MockTransportService) internalCluster().getInstance(TransportService.class, node_2);

@@ -1095,7 +1095,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 ensureYellow();

 final String masterNode1 = internalCluster().getMasterName();
-NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), getRandom());
+NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), random());
 internalCluster().setDisruptionScheme(networkPartition);
 networkPartition.startDisrupting();
 // We know this will time out due to the partition, we check manually below to not proceed until

@@ -1117,9 +1117,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 protected NetworkPartition addRandomPartition() {
 NetworkPartition partition;
 if (randomBoolean()) {
-partition = new NetworkUnresponsivePartition(getRandom());
+partition = new NetworkUnresponsivePartition(random());
 } else {
-partition = new NetworkDisconnectPartition(getRandom());
+partition = new NetworkDisconnectPartition(random());
 }

 setDisruptionScheme(partition);

@@ -1135,9 +1135,9 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {

 NetworkPartition partition;
 if (randomBoolean()) {
-partition = new NetworkUnresponsivePartition(side1, side2, getRandom());
+partition = new NetworkUnresponsivePartition(side1, side2, random());
 } else {
-partition = new NetworkDisconnectPartition(side1, side2, getRandom());
+partition = new NetworkDisconnectPartition(side1, side2, random());
 }

 internalCluster().setDisruptionScheme(partition);

@@ -1148,10 +1148,10 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
 private ServiceDisruptionScheme addRandomDisruptionScheme() {
 // TODO: add partial partitions
 List<ServiceDisruptionScheme> list = Arrays.asList(
-new NetworkUnresponsivePartition(getRandom()),
-new NetworkDelaysPartition(getRandom()),
-new NetworkDisconnectPartition(getRandom()),
-new SlowClusterStateProcessing(getRandom())
+new NetworkUnresponsivePartition(random()),
+new NetworkDelaysPartition(random()),
+new NetworkDisconnectPartition(random()),
+new SlowClusterStateProcessing(random())
 );
 Collections.shuffle(list, random());
 setDisruptionScheme(list.get(0));
@@ -349,7 +349,7 @@ public class MetaDataStateFormatTests extends ESTestCase {

 @Override
 protected Directory newDirectory(Path dir) throws IOException {
-MockDirectoryWrapper mock = new MockDirectoryWrapper(getRandom(), super.newDirectory(dir));
+MockDirectoryWrapper mock = new MockDirectoryWrapper(random(), super.newDirectory(dir));
 closeAfterSuite(mock);
 return mock;
 }

@@ -105,7 +105,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase {
 * and find a better copy for the shard.
 */
 public void testAsyncFetchOnAnythingButIndexCreation() {
-UnassignedInfo.Reason reason = RandomPicks.randomFrom(getRandom(), EnumSet.complementOf(EnumSet.of(UnassignedInfo.Reason.INDEX_CREATED)));
+UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), EnumSet.complementOf(EnumSet.of(UnassignedInfo.Reason.INDEX_CREATED)));
 RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.EMPTY, reason);
 testAllocator.clean();
 testAllocator.allocateUnassigned(allocation);

@@ -51,7 +51,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testDefaultAnalyzers() throws IOException {
-Version version = VersionUtils.randomVersion(getRandom());
+Version version = VersionUtils.randomVersion(random());
 Settings settings = Settings
 .builder()
 .put(IndexMetaData.SETTING_VERSION_CREATED, version)

@@ -65,7 +65,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testOverrideDefaultAnalyzer() throws IOException {
-Version version = VersionUtils.randomVersion(getRandom());
+Version version = VersionUtils.randomVersion(random());
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
 Collections.singletonMap("default", analyzerProvider("default")),

@@ -76,7 +76,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testOverrideDefaultIndexAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_5_0_0_alpha1, Version.CURRENT);
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT);
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 try {
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),

@@ -90,7 +90,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testBackCompatOverrideDefaultIndexAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
+Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
 Collections.singletonMap("default_index", analyzerProvider("default_index")),

@@ -101,7 +101,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testOverrideDefaultSearchAnalyzer() {
-Version version = VersionUtils.randomVersion(getRandom());
+Version version = VersionUtils.randomVersion(random());
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
 Collections.singletonMap("default_search", analyzerProvider("default_search")),

@@ -112,7 +112,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }

 public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
+Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 Map<String, AnalyzerProvider> analyzers = new HashMap<>();
 analyzers.put("default_index", analyzerProvider("default_index"));
@@ -246,11 +246,11 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 // missing value is set to an actual value
 final String[] values = new String[randomIntBetween(2, 30)];
 for (int i = 1; i < values.length; ++i) {
-values[i] = TestUtil.randomUnicodeString(getRandom());
+values[i] = TestUtil.randomUnicodeString(random());
 }
 final int numDocs = scaledRandomIntBetween(10, 3072);
 for (int i = 0; i < numDocs; ++i) {
-final String value = RandomPicks.randomFrom(getRandom(), values);
+final String value = RandomPicks.randomFrom(random(), values);
 if (value == null) {
 writer.addDocument(new Document());
 } else {

@@ -302,11 +302,11 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 public void testSortMissing(boolean first, boolean reverse) throws IOException {
 final String[] values = new String[randomIntBetween(2, 10)];
 for (int i = 1; i < values.length; ++i) {
-values[i] = TestUtil.randomUnicodeString(getRandom());
+values[i] = TestUtil.randomUnicodeString(random());
 }
 final int numDocs = scaledRandomIntBetween(10, 3072);
 for (int i = 0; i < numDocs; ++i) {
-final String value = RandomPicks.randomFrom(getRandom(), values);
+final String value = RandomPicks.randomFrom(random(), values);
 if (value == null) {
 writer.addDocument(new Document());
 } else {

@@ -355,7 +355,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 public void testNestedSorting(MultiValueMode sortMode) throws IOException {
 final String[] values = new String[randomIntBetween(2, 20)];
 for (int i = 0; i < values.length; ++i) {
-values[i] = TestUtil.randomSimpleString(getRandom());
+values[i] = TestUtil.randomSimpleString(random());
 }
 final int numParents = scaledRandomIntBetween(10, 3072);
 List<Document> docs = new ArrayList<>();

@@ -367,14 +367,14 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 final Document child = new Document();
 final int numValues = randomInt(3);
 for (int k = 0; k < numValues; ++k) {
-final String value = RandomPicks.randomFrom(getRandom(), values);
+final String value = RandomPicks.randomFrom(random(), values);
 addField(child, "text", value);
 }
 docs.add(child);
 }
 final Document parent = new Document();
 parent.add(new StringField("type", "parent", Store.YES));
-final String value = RandomPicks.randomFrom(getRandom(), values);
+final String value = RandomPicks.randomFrom(random(), values);
 if (value != null) {
 addField(parent, "text", value);
 }

@@ -400,10 +400,10 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
 missingValue = "_last";
 break;
 case 2:
-missingValue = new BytesRef(RandomPicks.randomFrom(getRandom(), values));
+missingValue = new BytesRef(RandomPicks.randomFrom(random(), values));
 break;
 default:
-missingValue = new BytesRef(TestUtil.randomSimpleString(getRandom()));
+missingValue = new BytesRef(TestUtil.randomSimpleString(random()));
 break;
 }
 Query parentFilter = new TermQuery(new Term("type", "parent"));
@@ -107,7 +107,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
 private byte[] randomBytes() {
 int size = randomIntBetween(10, 1000);
 byte[] bytes = new byte[size];
-getRandom().nextBytes(bytes);
+random().nextBytes(bytes);
 return bytes;
 }

@@ -39,7 +39,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase {
 }

 public void testFilterByFrequency() throws Exception {
-Random random = getRandom();
+Random random = random();
 for (int i = 0; i < 1000; i++) {
 Document d = new Document();
 d.add(new StringField("id", "" + i, Field.Store.NO));

@@ -45,7 +45,7 @@ public class MultiOrdinalsTests extends ESTestCase {
 }

 public void testRandomValues() throws IOException {
-Random random = getRandom();
+Random random = random();
 int numDocs = 100 + random.nextInt(1000);
 int numOrdinals = 1 + random.nextInt(200);
 int numValues = 100 + random.nextInt(100000);

@@ -83,7 +83,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase {
 .bytes());

 try (Directory dir = new RAMDirectory();
-IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(getRandom())))) {
+IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())))) {
 w.addDocuments(doc.docs());
 try (DirectoryReader reader = DirectoryReader.open(w)) {
 final LeafReader leaf = reader.leaves().get(0).reader();

@@ -844,7 +844,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
 }

 public static String randomGeohash(int minPrecision, int maxPrecision) {
-return geohashGenerator.ofStringLength(getRandom(), minPrecision, maxPrecision);
+return geohashGenerator.ofStringLength(random(), minPrecision, maxPrecision);
 }

 public static class GeohashGenerator extends CodepointSetGenerator {

@@ -47,7 +47,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
 @Override
 protected GeoBoundingBoxQueryBuilder doCreateTestQueryBuilder() {
 GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(GEO_POINT_FIELD_NAME);
-Rectangle box = RandomShapeGenerator.xRandomRectangle(getRandom(), RandomShapeGenerator.xRandomPoint(getRandom()));
+Rectangle box = RandomShapeGenerator.xRandomRectangle(random(), RandomShapeGenerator.xRandomPoint(random()));

 if (randomBoolean()) {
 // check the top-left/bottom-right combination of setters

@@ -122,7 +122,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
 // returns null. This is if there is an error generating the polygon. So
 // in this case keep trying until we successfully generate one
 while (shapeBuilder == null) {
-shapeBuilder = RandomShapeGenerator.createShapeWithin(getRandom(), null, ShapeType.POLYGON);
+shapeBuilder = RandomShapeGenerator.createShapeWithin(random(), null, ShapeType.POLYGON);
 }
 JtsGeometry shape = (JtsGeometry) shapeBuilder.build();
 Coordinate[] coordinates = shape.getGeom().getCoordinates();

@@ -59,8 +59,8 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue

 @Override
 protected GeoShapeQueryBuilder doCreateTestQueryBuilder() {
-ShapeType shapeType = ShapeType.randomType(getRandom());
-ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null, shapeType);
+ShapeType shapeType = ShapeType.randomType(random());
+ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);

 GeoShapeQueryBuilder builder;
 clearShapeFields();

@@ -148,7 +148,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
 }

 public void testNoFieldName() throws Exception {
-ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
+ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
 try {
 new GeoShapeQueryBuilder(null, shape);
 fail("Expected IllegalArgumentException");

@@ -185,7 +185,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
 }

 public void testNoRelation() throws IOException {
-ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
+ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
 GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
 try {
 builder.relation(null);

@@ -196,7 +196,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
 }

 public void testInvalidRelation() throws IOException {
-ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
+ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
 GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
 try {
 builder.strategy(SpatialStrategy.TERM);
@@ -121,7 +121,7 @@ public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<Builder>
 }

 public void testLocationParsing() throws IOException {
-Point point = RandomShapeGenerator.xRandomPoint(getRandom());
+Point point = RandomShapeGenerator.xRandomPoint(random());
 Builder pointTestBuilder = new GeohashCellQuery.Builder("pin", new GeoPoint(point.getY(), point.getX()));
 String pointTest1 = "{\"geohash_cell\": {\"pin\": {\"lat\": " + point.getY() + ",\"lon\": " + point.getX() + "}}}";
 assertParsedQuery(pointTest1, pointTestBuilder);

@@ -136,7 +136,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
 queryStringQueryBuilder.useDisMax(randomBoolean());
 }
 if (randomBoolean()) {
-queryStringQueryBuilder.locale(randomLocale(getRandom()));
+queryStringQueryBuilder.locale(randomLocale(random()));
 }
 if (randomBoolean()) {
 queryStringQueryBuilder.timeZone(randomTimeZone());

@@ -58,7 +58,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
 result.lowercaseExpandedTerms(randomBoolean());
 }
 if (randomBoolean()) {
-result.locale(randomLocale(getRandom()));
+result.locale(randomLocale(random()));
 }
 if (randomBoolean()) {
 result.minimumShouldMatch(randomMinimumShouldMatch());

@@ -45,7 +45,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase<TemplateQue

 @BeforeClass
 public static void setupClass() {
-templateBase = RandomQueryBuilder.createQuery(getRandom());
+templateBase = RandomQueryBuilder.createQuery(random());
 }

 @Override

@@ -73,7 +73,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
 List<Document> docs = new ArrayList<>(numChildren + 1);
 for (int j = 0; j < numChildren; ++j) {
 Document doc = new Document();
-doc.add(new StringField("f", TestUtil.randomSimpleString(getRandom(), 2), Field.Store.NO));
+doc.add(new StringField("f", TestUtil.randomSimpleString(random(), 2), Field.Store.NO));
 doc.add(new StringField("__type", "child", Field.Store.NO));
 docs.add(doc);
 }

@@ -96,7 +96,7 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
 IndexSearcher searcher = new IndexSearcher(reader);
 PagedBytesIndexFieldData indexFieldData1 = getForField("f");
 IndexFieldData<?> indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1);
-final String missingValue = randomBoolean() ? null : TestUtil.randomSimpleString(getRandom(), 2);
+final String missingValue = randomBoolean() ? null : TestUtil.randomSimpleString(random(), 2);
 final int n = randomIntBetween(1, numDocs + 2);
 final boolean reverse = randomBoolean();

@@ -608,7 +608,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
 Index test = state.metaData().index("test").getIndex();
 GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
 List<ShardIterator> iterators = iterableAsArrayList(shardIterators);
-ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators);
+ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators);
 ShardRouting shardRouting = shardIterator.nextOrNull();
 assertNotNull(shardRouting);
 assertTrue(shardRouting.primary());

@@ -632,7 +632,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
 }
 }
 pruneOldDeleteGenerations(files);
-CorruptionUtils.corruptFile(getRandom(), files.toArray(new Path[0]));
+CorruptionUtils.corruptFile(random(), files.toArray(new Path[0]));
 return shardRouting;
 }
@@ -113,7 +113,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
 GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
 final Index test = state.metaData().index("test").getIndex();
 List<ShardIterator> iterators = iterableAsArrayList(shardIterators);
-ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators);
+ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators);
 ShardRouting shardRouting = shardIterator.nextOrNull();
 assertNotNull(shardRouting);
 assertTrue(shardRouting.primary());

@@ -141,7 +141,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
 if (!files.isEmpty()) {
 int corruptions = randomIntBetween(5, 20);
 for (int i = 0; i < corruptions; i++) {
-fileToCorrupt = RandomPicks.randomFrom(getRandom(), files);
+fileToCorrupt = RandomPicks.randomFrom(random(), files);
 try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
 // read
 raf.position(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.size() - 1)));

@@ -132,7 +132,7 @@ public class IndexActionIT extends ESIntegTestCase {
 final AtomicIntegerArray createdCounts = new AtomicIntegerArray(docCount);
 ExecutorService threadPool = Executors.newFixedThreadPool(threadCount);
 List<Callable<Void>> tasks = new ArrayList<>(taskCount);
-final Random random = getRandom();
+final Random random = random();
 for (int i=0;i< taskCount; i++ ) {
 tasks.add(new Callable<Void>() {
 @Override

@@ -150,7 +150,7 @@ public class RecoverySourceHandlerTests extends ESTestCase {
 metas.add(md);
 }

-CorruptionUtils.corruptFile(getRandom(), FileSystemUtils.files(tempDir, (p) ->
+CorruptionUtils.corruptFile(random(), FileSystemUtils.files(tempDir, (p) ->
 (p.getFileName().toString().equals("write.lock") ||
 p.getFileName().toString().startsWith("extra")) == false));
 Store targetStore = newStore(createTempDir(), false);

@@ -187,7 +187,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
 nodes.remove(internalCluster().getMasterName());

 // block none master node.
-BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(nodes.iterator().next(), getRandom());
+BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(nodes.iterator().next(), random());
 internalCluster().setDisruptionScheme(disruption);
 logger.info("--> indexing a doc");
 index("test", "type", "1");

@@ -247,7 +247,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
 }

 // Block cluster state processing where our shard is
-BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(otherNode, getRandom());
+BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(otherNode, random());
 internalCluster().setDisruptionScheme(disruption);
 disruption.startDisrupting();

@@ -365,7 +365,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
 }

 // Block cluster state processing on the replica
-BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(otherNode, getRandom());
+BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(otherNode, random());
 internalCluster().setDisruptionScheme(disruption);
 disruption.startDisrupting();
 final AtomicReference<Object> putMappingResponse = new AtomicReference<>();
@@ -592,7 +592,7 @@ public class IndexStatsIT extends ESIntegTestCase {
assertThat(isSet(flag, stats.getPrimaries()), equalTo(true));
assertThat(isSet(flag, stats.getTotal()), equalTo(true));
}
-Random random = getRandom();
+Random random = random();
EnumSet<Flag> flags = EnumSet.noneOf(Flag.class);
for (Flag flag : values) {
if (random.nextBoolean()) {

@@ -638,7 +638,7 @@ public class IndexStatsIT extends ESIntegTestCase {
flags.set(flag, true);
}
assertThat(flags.anySet(), equalTo(true));
-Random random = getRandom();
+Random random = random();
flags.set(values[random.nextInt(values.length)], false);
assertThat(flags.anySet(), equalTo(true));
@@ -140,7 +140,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
logger.info("--> move shard from node_1 to node_3, and wait for relocation to finish");

if (randomBoolean()) { // sometimes add cluster-state delay to trigger observers in IndicesStore.ShardActiveRequestHandler
-SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_3, getRandom());
+SingleNodeDisruption disruption = new BlockClusterStateProcessing(node_3, random());
internalCluster().setDisruptionScheme(disruption);
MockTransportService transportServiceNode3 = (MockTransportService) internalCluster().getInstance(TransportService.class, node_3);
CountDownLatch beginRelocationLatch = new CountDownLatch(1);
@@ -369,7 +369,7 @@ public class MultiValueModeTests extends ESTestCase {
final FixedBitSet docsWithValue = randomBoolean() ? null : new FixedBitSet(numDocs);
for (int i = 0; i < array.length; ++i) {
if (randomBoolean()) {
-array[i] = new BytesRef(RandomStrings.randomAsciiOfLength(getRandom(), 8));
+array[i] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
if (docsWithValue != null) {
docsWithValue.set(i);
}

@@ -399,7 +399,7 @@ public class MultiValueModeTests extends ESTestCase {
for (int i = 0; i < numDocs; ++i) {
final BytesRef[] values = new BytesRef[randomInt(4)];
for (int j = 0; j < values.length; ++j) {
-values[j] = new BytesRef(RandomStrings.randomAsciiOfLength(getRandom(), 8));
+values[j] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
}
Arrays.sort(values);
array[i] = values;

@@ -429,7 +429,7 @@ public class MultiValueModeTests extends ESTestCase {
}

private void verify(SortedBinaryDocValues values, int maxDoc) {
-for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(getRandom(), 8)) }) {
+for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
final BinaryDocValues selected = mode.select(values, missingValue);
for (int i = 0; i < maxDoc; ++i) {

@@ -463,7 +463,7 @@ public class MultiValueModeTests extends ESTestCase {
}

private void verify(SortedBinaryDocValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
-for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(getRandom(), 8)) }) {
+for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc);
int prevRoot = -1;
@@ -260,7 +260,7 @@ public class AggregatorParsingTests extends ESTestCase {
public void testInvalidAggregationName() throws Exception {
Matcher matcher = Pattern.compile("[^\\[\\]>]+").matcher("");
String name;
-Random rand = getRandom();
+Random rand = random();
int len = randomIntBetween(1, 5);
char[] word = new char[len];
while (true) {
@@ -32,7 +32,7 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAg
@Override
protected GeoDistanceAggregatorBuilder createTestAggregatorBuilder() {
int numRanges = randomIntBetween(1, 10);
-GeoPoint origin = RandomShapeGenerator.randomPoint(getRandom());
+GeoPoint origin = RandomShapeGenerator.randomPoint(random());
GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder("foo", origin);
for (int i = 0; i < numRanges; i++) {
String key = null;
@@ -94,7 +94,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
.addMapping("type", "location", "type=geo_point", "city", "type=keyword"));

List<IndexRequestBuilder> cities = new ArrayList<>();
-Random random = getRandom();
+Random random = random();
expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
for (int i = 0; i < numDocs; i++) {
//generate random point
@@ -139,7 +139,7 @@ public class ScriptValuesTests extends ESTestCase {
for (int i = 0; i < values.length; ++i) {
String[] strings = new String[randomInt(8)];
for (int j = 0; j < strings.length; ++j) {
-strings[j] = RandomStrings.randomAsciiOfLength(getRandom(), 5);
+strings[j] = RandomStrings.randomAsciiOfLength(random(), 5);
}
Arrays.sort(strings);
values[i] = strings;
@@ -567,7 +567,7 @@ public class GeoFilterIT extends ESIntegTestCase {
}

protected static String randomhash(int length) {
-return randomhash(getRandom(), length);
+return randomhash(random(), length);
}

protected static String randomhash(Random random) {

@@ -575,7 +575,7 @@ public class GeoFilterIT extends ESIntegTestCase {
}

protected static String randomhash() {
-return randomhash(getRandom());
+return randomhash(random());
}

protected static String randomhash(Random random, int length) {
@@ -297,7 +297,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {

public void testShapeFilterWithRandomGeoCollection() throws Exception {
// Create a random geometry collection.
-GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(getRandom());
+GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random());

logger.info("Created Random GeometryCollection containing {} shapes", gcb.numShapes());

@@ -319,8 +319,8 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {

public void testContainsShapeQuery() throws Exception {
// Create a random geometry collection.
-Rectangle mbr = xRandomRectangle(getRandom(), xRandomPoint(getRandom()), true);
-GeometryCollectionBuilder gcb = createGeometryCollectionWithin(getRandom(), mbr);
+Rectangle mbr = xRandomRectangle(random(), xRandomPoint(random()), true);
+GeometryCollectionBuilder gcb = createGeometryCollectionWithin(random(), mbr);

client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree" )
.execute().actionGet();
@@ -2030,7 +2030,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
refresh();
final int iters = scaledRandomIntBetween(20, 30);
for (int i = 0; i < iters; i++) {
-String highlighterType = rarely() ? null : RandomPicks.randomFrom(getRandom(), highlighterTypes);
+String highlighterType = rarely() ? null : RandomPicks.randomFrom(random(), highlighterTypes);
MultiMatchQueryBuilder.Type[] supportedQueryTypes;
if ("postings".equals(highlighterType)) {
//phrase_prefix is not supported by postings highlighter, as it rewrites against an empty reader, the prefix will never match any term

@@ -2038,7 +2038,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
} else {
supportedQueryTypes = MultiMatchQueryBuilder.Type.values();
}
-MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(getRandom(), supportedQueryTypes);
+MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(random(), supportedQueryTypes);
final MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType);

SearchSourceBuilder source = searchSource()
@@ -130,7 +130,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
fill(lastNames, "Captain", between(3, 7));
fillRandom(lastNames, between(30, 40));
for (int i = 0; i < numDocs; i++) {
-String first = RandomPicks.randomFrom(getRandom(), firstNames);
+String first = RandomPicks.randomFrom(random(), firstNames);
String last = randomPickExcept(lastNames, first);
builders.add(client().prepareIndex("test", "test", "" + i).setSource(
"full_name", first + " " + last,

@@ -245,11 +245,11 @@ public class MultiMatchQueryIT extends ESIntegTestCase {
// check if it's equivalent to a match query.
int numIters = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numIters; i++) {
-String field = RandomPicks.randomFrom(getRandom(), fields);
+String field = RandomPicks.randomFrom(random(), fields);
int numTerms = randomIntBetween(1, query.length);
StringBuilder builder = new StringBuilder();
for (int j = 0; j < numTerms; j++) {
-builder.append(RandomPicks.randomFrom(getRandom(), query)).append(" ");
+builder.append(RandomPicks.randomFrom(random(), query)).append(" ");
}
MultiMatchQueryBuilder multiMatchQueryBuilder = randomizeType(multiMatchQuery(builder.toString(), field));
SearchResponse multiMatchResp = client().prepareSearch("test")

@@ -661,7 +661,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase {

public <T> T randomPickExcept(List<T> fromList, T butNot) {
while (true) {
-T t = RandomPicks.randomFrom(getRandom(), fromList);
+T t = RandomPicks.randomFrom(random(), fromList);
if (t.equals(butNot)) {
continue;
}
@@ -73,7 +73,7 @@ public class QueryPhaseTests extends ESTestCase {
private void countTestCase(boolean withDeletions) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
-RandomIndexWriter w = new RandomIndexWriter(getRandom(), dir, iwc);
+RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
final int numDocs = scaledRandomIntBetween(100, 200);
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
@@ -206,7 +206,7 @@ public class SearchQueryIT extends ESIntegTestCase {

// see #3521
public void testConstantScoreQuery() throws Exception {
-Random random = getRandom();
+Random random = random();
createIndex("test");
indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), client().prepareIndex("test", "type1", "2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox"));

@@ -218,11 +218,11 @@ public class SearchQueryIT extends ESIntegTestCase {

searchResponse = client().prepareSearch("test").setQuery(
boolQuery().must(matchAllQuery()).must(
-constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + getRandom().nextFloat()))).get();
+constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat()))).get();
assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).score()));

-client().prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + getRandom().nextFloat())).get();
+client().prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())).get();
assertHitCount(searchResponse, 2L);
assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).score()));
@@ -203,7 +203,7 @@ public class DuelScrollIT extends ESIntegTestCase {
}
sort.order(randomBoolean() ? SortOrder.ASC : SortOrder.DESC);

-SearchType searchType = RandomPicks.randomFrom(getRandom(), Arrays.asList(searchTypes));
+SearchType searchType = RandomPicks.randomFrom(random(), Arrays.asList(searchTypes));

logger.info("numDocs={}, scrollRequestSize={}, sort={}, searchType={}", numDocs, scrollRequestSize, sort, searchType);
return new TestContext(numDocs, scrollRequestSize, sort, searchType);
@@ -221,7 +221,7 @@ public class FieldSortIT extends ESIntegTestCase {
}

public void testRandomSorting() throws IOException, InterruptedException, ExecutionException {
-Random random = getRandom();
+Random random = random();
assertAcked(prepareCreate("test")
.addMapping("type",
XContentFactory.jsonBuilder()
@@ -54,13 +54,13 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
int count = randomIntBetween(1, 10);
String[] geohashes = new String[count];
for (int i = 0; i < count; i++) {
-geohashes[i] = RandomGeoGenerator.randomPoint(getRandom()).geohash();
+geohashes[i] = RandomGeoGenerator.randomPoint(random()).geohash();
}

result = new GeoDistanceSortBuilder(fieldName, geohashes);
break;
case 1:
-GeoPoint pt = RandomGeoGenerator.randomPoint(getRandom());
+GeoPoint pt = RandomGeoGenerator.randomPoint(random());
result = new GeoDistanceSortBuilder(fieldName, pt.getLat(), pt.getLon());
break;
case 2:

@@ -127,7 +127,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
int count = randomIntBetween(1, 10);
result = new GeoPoint[count];
for (int i = 0; i < count; i++) {
-result[i] = RandomGeoGenerator.randomPoint(getRandom());
+result[i] = RandomGeoGenerator.randomPoint(random());
}
}
return result;

@@ -148,7 +148,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
switch (parameter) {
case 0:
while (Arrays.deepEquals(original.points(), result.points())) {
-GeoPoint pt = RandomGeoGenerator.randomPoint(getRandom());
+GeoPoint pt = RandomGeoGenerator.randomPoint(random());
result.point(pt.getLat(), pt.getLon());
}
break;

@@ -194,7 +194,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc

public void testSortModeSumIsRejectedInSetter() {
GeoDistanceSortBuilder builder = new GeoDistanceSortBuilder("testname", -1, -1);
-GeoPoint point = RandomGeoGenerator.randomPoint(getRandom());
+GeoPoint point = RandomGeoGenerator.randomPoint(random());
builder.point(point.getLat(), point.getLon());
try {
builder.sortMode(SortMode.SUM);
@@ -83,9 +83,9 @@ import static org.hamcrest.Matchers.notNullValue;

@SuppressCodecs("*") // requires custom completion format
public class CompletionSuggestSearchIT extends ESIntegTestCase {
-private final String INDEX = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
-private final String TYPE = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
-private final String FIELD = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
+private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
+private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
+private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder();

public void testPrefix() throws Exception {

@@ -1175,8 +1175,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
static class CompletionMappingBuilder {
String searchAnalyzer = "simple";
String indexAnalyzer = "simple";
-Boolean preserveSeparators = getRandom().nextBoolean();
-Boolean preservePositionIncrements = getRandom().nextBoolean();
+Boolean preserveSeparators = random().nextBoolean();
+Boolean preservePositionIncrements = random().nextBoolean();
LinkedHashMap<String, ContextMapping> contextMappings = null;

public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) {
@@ -55,9 +55,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
@SuppressCodecs("*") // requires custom completion format
public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

-private final String INDEX = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
-private final String TYPE = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
-private final String FIELD = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
+private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
+private final String TYPE = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
+private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);

@Override
protected int numberOfReplicas() {
@@ -145,7 +145,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe
* Test that a malformed JSON suggestion request fails.
*/
public void testMalformedJsonRequestPayload() throws Exception {
-final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
+final String field = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
final String payload = "{\n" +
" \"bad-payload\" : { \n" +
" \"prefix\" : \"sug\",\n" +
@@ -151,7 +151,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas
assertEquals("suggestion field name is empty", e.getMessage());

TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));

// test invalid accuracy values
expectThrows(IllegalArgumentException.class, () -> builder.accuracy(-0.5f));
expectThrows(IllegalArgumentException.class, () -> builder.accuracy(1.1f));

@@ -206,7 +206,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas
}

public void testMalformedJson() {
-final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
+final String field = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
String suggest = "{\n" +
" \"bad-payload\" : {\n" +
" \"text\" : \"the amsterdma meetpu\",\n" +
@@ -72,11 +72,11 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase {
logger.info("--> indexing some data");
IndexRequestBuilder[] buildersBefore = new IndexRequestBuilder[randomIntBetween(10, 200)];
for (int i = 0; i < buildersBefore.length; i++) {
-buildersBefore[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), "foo", Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
+buildersBefore[i] = client().prepareIndex(RandomPicks.randomFrom(random(), indicesBefore), "foo", Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
}
IndexRequestBuilder[] buildersAfter = new IndexRequestBuilder[randomIntBetween(10, 200)];
for (int i = 0; i < buildersAfter.length; i++) {
-buildersAfter[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), "bar", Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
+buildersAfter[i] = client().prepareIndex(RandomPicks.randomFrom(random(), indicesBefore), "bar", Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
}
indexRandom(true, buildersBefore);
indexRandom(true, buildersAfter);

@@ -97,7 +97,7 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase {
int howMany = randomIntBetween(1, buildersBefore.length);

for (int i = 0; i < howMany; i++) {
-IndexRequestBuilder indexRequestBuilder = RandomPicks.randomFrom(getRandom(), buildersBefore);
+IndexRequestBuilder indexRequestBuilder = RandomPicks.randomFrom(random(), buildersBefore);
IndexRequest request = indexRequestBuilder.request();
client().prepareDelete(request.index(), request.type(), request.id()).get();
}

@@ -144,7 +144,7 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase {

// Test restore after index deletion
logger.info("--> delete indices");
-String index = RandomPicks.randomFrom(getRandom(), indices);
+String index = RandomPicks.randomFrom(random(), indices);
cluster().wipeIndices(index);
logger.info("--> restore one index after deletion");
restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2").setWaitForCompletion(true).setIndices(index).execute().actionGet();
@@ -95,9 +95,9 @@ public class TribeIT extends ESIntegTestCase {

};
cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2,
-Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList(), Function.identity());
+Strings.randomBase64UUID(random()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList(), Function.identity());

-cluster2.beforeTest(getRandom(), 0.1);
+cluster2.beforeTest(random(), 0.1);
cluster2.ensureAtLeastNumDataNodes(2);
}
@@ -350,7 +350,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {

private IDSource getRandomIDs() {
IDSource ids;
-final Random random = getRandom();
+final Random random = random();
switch (random.nextInt(6)) {
case 0:
// random simple

@@ -516,7 +516,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
newSettings.put("index.gc_deletes", "1000000h");
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(newSettings).execute().actionGet());

-Random random = getRandom();
+Random random = random();

// Generate random IDs:
IDSource idSource = getRandomIDs();

@@ -596,7 +596,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
public void run() {
try {
//final Random threadRandom = RandomizedContext.current().getRandom();
-final Random threadRandom = getRandom();
+final Random threadRandom = random();
startingGun.await();
while (true) {
@@ -84,7 +84,7 @@ public class MinDocCountTests extends AbstractTermsTestCase {
for (int i = 0; i < cardinality; ++i) {
String stringTerm;
do {
-stringTerm = RandomStrings.randomAsciiOfLength(getRandom(), 8);
+stringTerm = RandomStrings.randomAsciiOfLength(random(), 8);
} while (!stringTerms.add(stringTerm));
long longTerm;
do {
@@ -73,7 +73,7 @@ public abstract class ESAllocationTestCase extends ESTestCase {
}

public static MockAllocationService createAllocationService(Settings settings) {
-return createAllocationService(settings, getRandom());
+return createAllocationService(settings, random());
}

public static MockAllocationService createAllocationService(Settings settings, Random random) {

@@ -88,13 +88,13 @@ public abstract class ESAllocationTestCase extends ESTestCase {

public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) {
return new MockAllocationService(settings,
-randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
+randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()),
NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), clusterInfoService);
}

public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator gatewayAllocator) {
return new MockAllocationService(settings,
-randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), getRandom()),
+randomAllocationDeciders(settings, new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()),
gatewayAllocator, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE);
}
@@ -345,7 +345,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
default:
fail("Unknown Scope: [" + currentClusterScope + "]");
}
-cluster().beforeTest(getRandom(), getPerTestTransportClientRatio());
+cluster().beforeTest(random(), getPerTestTransportClientRatio());
cluster().wipe(excludeTemplates());
randomIndexTemplate();
}

@@ -367,10 +367,10 @@ public abstract class ESIntegTestCase extends ESTestCase {
// TODO move settings for random directory etc here into the index based randomized settings.
if (cluster().size() > 0) {
Settings.Builder randomSettingsBuilder =
-setRandomIndexSettings(getRandom(), Settings.builder());
+setRandomIndexSettings(random(), Settings.builder());
if (isInternalCluster()) {
// this is only used by mock plugins and if the cluster is not internal we just can't set it
-randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), getRandom().nextLong());
+randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), random().nextLong());
}

randomSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards())

@@ -609,7 +609,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
}
Client client = cluster().client();
if (frequently()) {
-client = new RandomizingClient(client, getRandom());
+client = new RandomizingClient(client, random());
}
return client;
}

@@ -617,7 +617,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
public static Client dataNodeClient() {
Client client = internalCluster().dataNodeClient();
if (frequently()) {
-client = new RandomizingClient(client, getRandom());
+client = new RandomizingClient(client, random());
}
return client;
}

@@ -1318,7 +1318,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
*/
public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean maybeFlush, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException {

-Random random = getRandom();
+Random random = random();
Set<String> indicesSet = new HashSet<>();
for (IndexRequestBuilder builder : builders) {
indicesSet.add(builder.request().index());

@@ -1992,7 +1992,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
* of the provided index.
*/
protected String routingKeyForShard(String index, String type, int shard) {
-return internalCluster().routingKeyForShard(resolveIndex(index), type, shard, getRandom());
+return internalCluster().routingKeyForShard(resolveIndex(index), type, shard, random());
}

/**
@@ -208,16 +208,9 @@ public abstract class ESTestCase extends LuceneTestCase {
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------

-// TODO: replaces uses of getRandom() with random()
// TODO: decide on one set of naming for between/scaledBetween and remove others
// TODO: replace frequently() with usually()

-/** Shortcut for {@link RandomizedContext#getRandom()}. Use {@link #random()} instead. */
-public static Random getRandom() {
-// TODO: replace uses of this function with random()
-return random();
-}
-
/**
* Returns a "scaled" random number between min and max (inclusive).
*
@@ -36,7 +36,7 @@ public class NetworkPartitionIT extends ESIntegTestCase {
public void testNetworkPartitionWithNodeShutdown() throws IOException {
internalCluster().ensureAtLeastNumDataNodes(2);
String[] nodeNames = internalCluster().getNodeNames();
-NetworkPartition networkPartition = new NetworkUnresponsivePartition(nodeNames[0], nodeNames[1], getRandom());
+NetworkPartition networkPartition = new NetworkUnresponsivePartition(nodeNames[0], nodeNames[1], random());
internalCluster().setDisruptionScheme(networkPartition);
networkPartition.startDisrupting();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeNames[0]));
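
Editor's note (not part of the commit): a minimal sketch of the pattern this refactor applies, assuming a hypothetical test class name. Every call site now draws its randomness from the seed-driven LuceneTestCase.random() instead of the removed ESTestCase.getRandom() shim, so the behaviour is unchanged and still reproducible from the test seed.

// Hypothetical example, not in this commit.
import java.util.Arrays;
import java.util.Random;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;

import org.elasticsearch.test.ESTestCase;

public class RandomSourceExampleTests extends ESTestCase {

    public void testUsesSharedRandom() {
        // random() is inherited from LuceneTestCase and tied to the per-test seed,
        // so a failing run can be reproduced from the seed it reports.
        Random random = random();
        String term = RandomStrings.randomAsciiOfLength(random, 8);
        String pick = RandomPicks.randomFrom(random, Arrays.asList("a", "b", "c"));
        assertNotNull(term);
        assertNotNull(pick);
    }
}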