LUCENE-5996: Collections.shuffle(List) is now a forbidden API.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1629920 13f79535-47bb-0310-9956-ffa450edef68
Adrien Grand 2014-10-07 16:22:39 +00:00
parent 5b906eeb1b
commit 3f99989544
11 changed files with 31 additions and 19 deletions
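
For background, a minimal standalone sketch (plain JDK; the class name and list values are illustrative) of the reproducibility problem behind this change: the single-argument Collections.shuffle(List) draws from an internal random source, so a shuffle order seen in a failing run cannot be replayed, whereas the two-argument overload yields the same permutation whenever it is handed an identically seeded Random.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

public class ShuffleReproducibilityDemo {
  public static void main(String[] args) {
    List<String> first = new ArrayList<>(Arrays.asList("w", "x", "y", "z"));
    List<String> second = new ArrayList<>(Arrays.asList("w", "x", "y", "z"));

    long seed = 42L; // illustrative; in Lucene/Solr tests the seed ultimately comes from the test framework
    Collections.shuffle(first, new Random(seed));  // reproducible: same seed, same order
    Collections.shuffle(second, new Random(seed));
    System.out.println(first.equals(second));      // prints true

    // Collections.shuffle(first);                 // the overload forbidden by this commit: cannot be replayed
  }
}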

View File

@@ -162,10 +162,7 @@ public abstract class StringHelper {
if (prop != null) {
// So if there is a test failure that relied on hash
// order, we remain reproducible based on the test seed:
if (prop.length() > 8) {
prop = prop.substring(prop.length()-8);
}
GOOD_FAST_HASH_SEED = (int) Long.parseLong(prop, 16);
GOOD_FAST_HASH_SEED = prop.hashCode();
} else {
GOOD_FAST_HASH_SEED = (int) System.currentTimeMillis();
}
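A note on the hunk above: the added line derives the hash seed with String.hashCode() instead of trimming the property to its last eight hex characters and parsing it, and hashCode() maps any seed string deterministically to an int. A minimal sketch of both derivations (standalone class; the property value is made up):

import java.util.Locale;

public class HashSeedDemo {
  public static void main(String[] args) {
    String prop = "A1B2C3D4E5F6"; // hypothetical seed property value

    // Removed approach: keep at most the last 8 hex chars and parse them as a hex long.
    String trimmed = prop.length() > 8 ? prop.substring(prop.length() - 8) : prop;
    int oldSeed = (int) Long.parseLong(trimmed, 16);

    // Added approach: hash the whole string, deterministic for any seed value.
    int newSeed = prop.hashCode();

    System.out.println(String.format(Locale.ROOT, "old=%08x new=%08x", oldSeed, newSeed));
  }
}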

View File

@@ -261,7 +261,7 @@ public class Test2BTerms extends LuceneTestCase {
private void testSavedTerms(IndexReader r, List<BytesRef> terms) throws IOException {
System.out.println("TEST: run " + terms.size() + " terms on reader=" + r);
IndexSearcher s = newSearcher(r);
Collections.shuffle(terms);
Collections.shuffle(terms, random());
TermsEnum termsEnum = MultiFields.getTerms(r, "field").iterator(null);
boolean failed = false;
for(int iter=0;iter<10*terms.size();iter++) {

View File

@@ -122,7 +122,7 @@ public class TestIndexReaderClose extends LuceneTestCase {
leafReader.addCoreClosedListener(listeners.get(random().nextInt(listeners.size())));
}
final int removed = random().nextInt(numListeners);
Collections.shuffle(listeners);
Collections.shuffle(listeners, random());
for (int i = 0; i < removed; ++i) {
leafReader.removeCoreClosedListener(listeners.get(i));
}

View File

@@ -93,7 +93,7 @@ public class TestSameScoresWithThreads extends LuceneTestCase {
startingGun.await();
for(int i=0;i<20;i++) {
List<Map.Entry<BytesRef,TopDocs>> shuffled = new ArrayList<>(answers.entrySet());
Collections.shuffle(shuffled);
Collections.shuffle(shuffled, random());
for(Map.Entry<BytesRef,TopDocs> ent : shuffled) {
TopDocs actual = s.search(new TermQuery(new Term("body", ent.getKey())), 100);
TopDocs expected = ent.getValue();

View File

@@ -34,14 +34,14 @@ public class TestSparseFixedBitSet extends BaseDocIdSetTestCase<SparseFixedBitSe
for (int doc = bs.nextSetBit(0); doc != -1; doc = bs.nextSetBit(doc + 1)) {
buffer.add(doc);
if (buffer.size() >= 100000) {
Collections.shuffle(buffer);
Collections.shuffle(buffer, random());
for (int i : buffer) {
set.set(i);
}
buffer.clear();
}
}
Collections.shuffle(buffer);
Collections.shuffle(buffer, random());
for (int i : buffer) {
set.set(i);
}

View File

@@ -125,7 +125,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
w.addDocument(doc);
if (rand.nextInt(50) == 17) {
// mixup binding of field name -> Number every so often
Collections.shuffle(fieldIDs);
Collections.shuffle(fieldIDs, random());
}
if (rand.nextInt(5) == 3 && i > 0) {
final String delID = ""+rand.nextInt(i);

View File

@@ -31,3 +31,6 @@ java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the last chars are random chars
java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
java.io.File#delete() @ use Files.delete for real exception, IOUtils.deleteFilesIgnoringExceptions if you dont care
@defaultMessage Use shuffle(List, Random) instead so that it can be reproduced
java.util.Collections#shuffle(java.util.List)
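In a forbidden-apis signatures file, the @defaultMessage directive supplies the message reported for the signatures listed after it, so any remaining call to the single-argument overload now fails the check with the hint above. A minimal sketch of the compliant pattern in a test (hypothetical test class; LuceneTestCase.random() returns the per-test Random derived from the test seed):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.lucene.util.LuceneTestCase;

public class TestSeededShufflePattern extends LuceneTestCase {
  public void testShuffleUsesTestSeed() {
    List<Integer> values = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5));
    Collections.shuffle(values, random()); // allowed: reproducible from the printed test seed
    // Collections.shuffle(values);        // rejected by the signature added above
    assertEquals(5, values.size());
  }
}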

View File

@@ -23,7 +23,6 @@ import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
@@ -48,6 +47,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
@@ -164,6 +164,18 @@ public class OverseerCollectionProcessor implements Runnable, Closeable {
ZkStateReader.MAX_SHARDS_PER_NODE, "1",
ZkStateReader.AUTO_ADD_REPLICAS, "false");
private static final Random RANDOM;
static {
// We try to make things reproducible in the context of our tests by initializing the random instance
// based on the current seed
String seed = System.getProperty("tests.seed");
if (seed == null) {
RANDOM = new Random();
} else {
RANDOM = new Random(seed.hashCode());
}
}
public ExecutorService tpe ;
private static Logger log = LoggerFactory
@@ -1623,8 +1635,8 @@ public class OverseerCollectionProcessor implements Runnable, Closeable {
Set<String> nodes = clusterState.getLiveNodes();
List<String> nodeList = new ArrayList<>(nodes.size());
nodeList.addAll(nodes);
Collections.shuffle(nodeList);
Collections.shuffle(nodeList, RANDOM);
// TODO: Have maxShardsPerNode param for this operation?
@@ -1634,7 +1646,7 @@ public class OverseerCollectionProcessor implements Runnable, Closeable {
// TODO: change this to handle sharding a slice into > 2 sub-shards.
for (int i = 1; i <= subSlices.size(); i++) {
Collections.shuffle(nodeList);
Collections.shuffle(nodeList, RANDOM);
String sliceName = subSlices.get(i - 1);
for (int j = 2; j <= repFactor; j++) {
String subShardNodeName = nodeList.get((repFactor * (i - 1) + (j - 2)) % nodeList.size());
@@ -2284,7 +2296,7 @@ public class OverseerCollectionProcessor implements Runnable, Closeable {
List<String> nodeList = new ArrayList<>(nodes.size());
nodeList.addAll(nodes);
if (createNodeList != null) nodeList.retainAll(createNodeList);
Collections.shuffle(nodeList);
Collections.shuffle(nodeList, RANDOM);
if (nodeList.size() <= 0) {
throw new SolrException(ErrorCode.BAD_REQUEST, "Cannot create collection " + collectionName
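
The static block added above seeds the Overseer's Random from the tests.seed system property when it is present, so node-list shuffles become repeatable under the test framework while normal deployments keep a default-constructed Random. A minimal standalone sketch of that behaviour (class and node names are made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

public class SeededNodeShuffleDemo {
  static List<String> shuffledNodes(String seed) {
    // Mirrors the initializer above: fall back to an unseeded Random when no test seed is set.
    Random random = (seed == null) ? new Random() : new Random(seed.hashCode());
    List<String> nodes = new ArrayList<>(Arrays.asList(
        "node1:8983_solr", "node2:8983_solr", "node3:8983_solr", "node4:8983_solr"));
    Collections.shuffle(nodes, random);
    return nodes;
  }

  public static void main(String[] args) {
    String seed = System.getProperty("tests.seed", "C0FFEE"); // default value here is illustrative
    // Same seed string -> same shuffled node order on every run.
    System.out.println(shuffledNodes(seed));
    System.out.println(shuffledNodes(seed));
  }
}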

View File

@@ -1266,7 +1266,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
createCollection(collectionName, client, 2, 2);
String newReplicaName = Assign.assignNode(collectionName, client.getZkStateReader().getClusterState());
ArrayList<String> nodeList = new ArrayList<>(client.getZkStateReader().getClusterState().getLiveNodes());
Collections.shuffle(nodeList);
Collections.shuffle(nodeList, random());
CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
addReplica.setCollectionName(collectionName);
addReplica.setShardName("shard1");

View File

@@ -137,7 +137,7 @@ public class OverseerRolesTest extends AbstractFullDistribZkTestBase{
log.info("Current leader {} ", currentLeader);
l.remove(currentLeader);
Collections.shuffle(l);
Collections.shuffle(l, random());
String overseerDesignate = l.get(0);
log.info("overseerDesignate {}",overseerDesignate);
setOverseerRole(CollectionAction.ADDROLE,overseerDesignate);
@@ -165,7 +165,7 @@ public class OverseerRolesTest extends AbstractFullDistribZkTestBase{
l.remove(overseerDesignate);
Collections.shuffle(l);
Collections.shuffle(l, random());
String anotherOverseer = l.get(0);
log.info("Adding another overseer designate {}", anotherOverseer);

View File

@@ -198,7 +198,7 @@ public class AddBlockUpdateTest extends SolrTestCaseJ4 {
block("Y"),
block("Z")));
Collections.shuffle(blocks);
Collections.shuffle(blocks, random());
log.trace("{}", blocks);