OPENJPA-544. JDK1.5 conversion hangover. Introduce pseudo-random behavior into NullSafeConcurrentHashMap. This is important for the various data structures in OpenJPA and derivatives (mostly caches) that rely on pseudo-random cache removal. The problem was exacerbated by the fact that ConcurrentHashMap seems to return an Iterator that is approximately LIFO, which is often the worst possible eviction order. This change keeps behavior pseudo-random for up to 16 random removes / accesses between put() calls; the figure of 16 is hard-coded and could be made configurable or adaptive.
git-svn-id: https://svn.apache.org/repos/asf/openjpa/branches/1.1.x@652523 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 3ac1f08072
commit d8b82311f5
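Before the diff itself, here is a self-contained sketch of the idea in the commit message: keep a small, randomly sampled side set of keys next to the map, and evict from that sample rather than from ConcurrentHashMap's roughly-LIFO iteration order. The class and method names below are invented for illustration, and the sketch omits the null-masking that the real NullSafeConcurrentHashMap performs.

import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

class PseudoRandomEvictionSketch<K, V> {
    private final ConcurrentMap<K, V> map = new ConcurrentHashMap<K, V>();
    // side set of sampled keys, itself backed by a ConcurrentHashMap
    private final ConcurrentMap<K, Boolean> sampledKeys =
        new ConcurrentHashMap<K, Boolean>();
    private final Random random = new Random();

    public V put(K key, V value) {
        // sample roughly 3 of every 10 puts, capped at 16 keys
        if (sampledKeys.size() < 16 && random.nextInt(10) < 3)
            sampledKeys.putIfAbsent(key, Boolean.TRUE);
        return map.put(key, value);
    }

    /** Removes and returns one key, preferring the random sample. */
    public K evictOne() {
        Iterator<K> sample = sampledKeys.keySet().iterator();
        if (sample.hasNext()) {
            K key = sample.next();
            sampledKeys.remove(key);
            if (map.remove(key) != null)
                return key;
        }
        // sample exhausted (or raced away): fall back to iteration order
        Iterator<K> all = map.keySet().iterator();
        if (all.hasNext()) {
            K key = all.next();
            map.remove(key);
            return key;
        }
        return null;
    }
}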
org/apache/openjpa/lib/util/concurrent/NullSafeConcurrentHashMap.java

@@ -19,13 +19,17 @@
 package org.apache.openjpa.lib.util.concurrent;
 
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.Map;
 import java.util.Enumeration;
 import java.util.Set;
 import java.util.Collection;
 import java.util.AbstractSet;
 import java.util.Iterator;
 import java.util.AbstractCollection;
+import java.util.Random;
+import java.util.HashSet;
+import java.util.TreeSet;
+
+import org.apache.commons.collections.set.MapBackedSet;
 
 /**
  * A subclass of {@link ConcurrentHashMap} that allows null keys and values.
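A note on the new commons-collections import: MapBackedSet.decorate wraps any Map as a Set, storing a fixed dummy value for every element; decorating a ConcurrentHashMap therefore yields a thread-safe Set, which is how the randomKeys field below is built. A minimal illustration, assuming commons-collections 3.x on the classpath:

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.collections.set.MapBackedSet;

public class MapBackedSetDemo {
    public static void main(String[] args) {
        // Every element added to the Set becomes a key in the underlying
        // ConcurrentHashMap; the second argument is the value stored for
        // each key, and it must be non-null for ConcurrentHashMap.
        Set keys = MapBackedSet.decorate(new ConcurrentHashMap(), "DUMMY");
        keys.add("a");
        keys.add("b");
        System.out.println(keys.contains("a")); // true
        System.out.println(keys.size());        // 2
    }
}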
@@ -36,10 +40,20 @@ import java.util.AbstractCollection;
  */
 public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
 
-    private enum Null {
-        MARKER
+    private enum Markers {
+        NULL,
+        MAP_BACKED_SET_DUMMY_VAL
     }
 
+    // The second argument is used within MapBackedSet as the value for
+    // all the key-val pairs that are put into the underlying Map. This
+    // is required for our usage since ConcurrentHashMap does not allow
+    // null values.
+    private Set randomKeys = MapBackedSet.decorate(
+        new ConcurrentHashMap(), Markers.MAP_BACKED_SET_DUMMY_VAL);
+
+    private Random random = new Random();
+
     public NullSafeConcurrentHashMap(int size, float load,
         int concurrencyLevel) {
         super(size, load, concurrencyLevel);
@@ -52,24 +66,135 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
      * Returns internal representation for object.
      */
     private static Object maskNull(Object o) {
-        return (o == null ? Null.MARKER : o);
+        return (o == null ? Markers.NULL : o);
     }
 
     /**
      * Returns object represented by specified internal representation.
      */
     private static Object unmaskNull(Object o) {
-        return (o == Null.MARKER ? null : o);
+        return (o == Markers.NULL ? null : o);
     }
 
+    public Entry removeRandom() {
+        // this doesn't just use randomEntryIterator() because that iterator
+        // has weaker concurrency guarantees than this method. In particular,
+        // this method will continue to attempt to remove random entries even
+        // as other threads remove the same entries, whereas the random
+        // iterator may return values that have been removed.
+
+        while (!isEmpty()) {
+            while (!randomKeys.isEmpty()) {
+                // randomKeys contains null-masked data
+                Iterator iter = randomKeys.iterator();
+                Object key = iter.next();
+                if (key != null && randomKeys.remove(key)) {
+                    Object val = super.remove(key);
+                    if (val != null)
+                        return new EntryImpl(unmaskNull(key), unmaskNull(val));
+                }
+            }
+
+            // if randomKeys is empty, fall back to non-random behavior.
+            Object key = super.keySet().iterator().next();
+            Object val = super.remove(key);
+            if (val != null)
+                return new EntryImpl(unmaskNull(key), unmaskNull(val));
+        }
+        return null;
+    }
+
+    /**
+     * The returned data structure should not be shared among multiple
+     * threads.
+     */
+    public Iterator<Entry> randomEntryIterator() {
+        return new Iterator<Entry>() {
+
+            Iterator randomIter = randomKeys.iterator();
+            Iterator nonRandomIter = NullSafeConcurrentHashMap.super.keySet()
+                .iterator();
+
+            Set returned = new HashSet();
+            Entry next;
+            boolean nextSet = false;
+
+            public boolean hasNext() {
+                // we've set the next value and we haven't returned it yet
+                if (nextSet)
+                    return true;
+
+                // compute the next value. If the computation returns null,
+                // return false. Else, store the next value and return true.
+                Object nextKey;
+                Object nextValue;
+                if (randomIter.hasNext()) {
+                    nextKey = randomIter.next();
+                    nextValue = NullSafeConcurrentHashMap.super.get(nextKey);
+                    if (nextValue != null) {
+                        returned.add(nextKey);
+                        next = new EntryImpl(unmaskNull(nextKey),
+                            unmaskNull(nextValue));
+                        nextSet = true;
+                        return true;
+                    }
+                }
+
+                while (nonRandomIter.hasNext()) {
+                    nextKey = nonRandomIter.next();
+
+                    if (returned.contains(nextKey))
+                        continue;
+
+                    nextValue = NullSafeConcurrentHashMap.super.get(nextKey);
+                    if (nextValue != null) {
+                        returned.add(nextKey);
+                        next = new EntryImpl(unmaskNull(nextKey),
+                            unmaskNull(nextValue));
+                        nextSet = true;
+                        return true;
+                    }
+                }
+                return false;
+            }
+
+            public Entry next() {
+                // hasNext() will initialize this.next
+                if (!nextSet && !hasNext())
+                    return null;
+
+                // if we get here, then we're about to return a next value
+                nextSet = false;
+
+                if (containsKey(next.getKey()))
+                    return next;
+
+                // something has changed since the last iteration (presumably
+                // due to multi-threaded access to the underlying data
+                // structure); recurse
+                return next();
+            }
+
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
+    }
+
     @Override
     public Object remove(Object key) {
-        return unmaskNull(super.remove(maskNull(key)));
+        Object maskedKey = maskNull(key);
+        Object val = unmaskNull(super.remove(maskedKey));
+        randomKeys.remove(maskedKey);
+        return val;
     }
 
     @Override
     public boolean remove(Object key, Object value) {
-        return super.remove(maskNull(key), maskNull(value));
+        Object maskedKey = maskNull(key);
+        boolean val = super.remove(maskedKey, maskNull(value));
+        randomKeys.remove(maskedKey);
+        return val;
     }
 
     @Override
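The two new public methods above are the surface area that cache code relies on. A hypothetical caller (class name invented; assumes the demo can see org.apache.openjpa.lib.util.concurrent.NullSafeConcurrentHashMap) would use them like this:

import java.util.Iterator;
import java.util.Map.Entry;

public class EvictionDemo {
    public static void main(String[] args) {
        NullSafeConcurrentHashMap cache = new NullSafeConcurrentHashMap();
        cache.put("a", "A");
        cache.put("b", "B");
        cache.put(null, "allowed"); // null keys are legal in this subclass

        // evict one entry in (pseudo-)random order
        Entry evicted = cache.removeRandom();
        System.out.println("evicted: " + evicted.getKey());

        // walk the remaining entries, sampled-random entries first;
        // per the javadoc, the iterator must not be shared across threads
        for (Iterator iter = cache.randomEntryIterator(); iter.hasNext();) {
            Entry e = (Entry) iter.next();
            System.out.println(e.getKey() + "=" + e.getValue());
        }
    }
}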
@@ -85,12 +210,34 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
     @Override
     public Object putIfAbsent(Object key, Object value) {
-        return unmaskNull(super.putIfAbsent(maskNull(key), maskNull(value)));
+        Object maskedKey = maskNull(key);
+        Object superVal = super.putIfAbsent(maskedKey, maskNull(value));
+        addRandomKey(maskedKey);
+        return unmaskNull(superVal);
     }
 
     @Override
     public Object put(Object key, Object value) {
-        return unmaskNull(super.put(maskNull(key), maskNull(value)));
+        Object maskedKey = maskNull(key);
+        Object superVal = super.put(maskedKey, maskNull(value));
+        addRandomKey(maskedKey);
+        return unmaskNull(superVal);
     }
 
+    /**
+     * Potentially adds <code>maskedKey</code> to the set of random keys
+     * to be removed by {@link #removeRandom()}.
+     *
+     * @since 1.1.0
+     */
+    private void addRandomKey(Object maskedKey) {
+        // Add one in every three keys to the set. Only do this when
+        // there are less than 16 elements in the random key set; this
+        // means that the algorithm will be pseudo-random for up to
+        // 16 removes (either via removeRandom() or normal remove()
+        // calls) that have no intervening put() calls.
+        if (random != null && randomKeys.size() < 16 && random.nextInt(10) < 3)
+            randomKeys.add(maskedKey);
+    }
+
     @Override
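The constants in addRandomKey() (3-in-10 sampling, cap of 16) determine how quickly the random-key set refills between evictions. A throwaway calculation, not part of the commit, makes the refill rate concrete:

import java.util.Random;

public class SamplingRateSketch {
    public static void main(String[] args) {
        Random random = new Random();
        final int CAP = 16;             // same cap as addRandomKey()
        int sampled = 0;
        int puts = 0;
        // count the put() calls needed to fill the sample set from empty
        while (sampled < CAP) {
            puts++;
            if (random.nextInt(10) < 3) // same 3-in-10 test as addRandomKey()
                sampled++;
        }
        // expectation: 16 / 0.3, i.e. roughly 53 puts
        System.out.println("puts needed to fill the sample: " + puts);
    }
}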
@@ -160,11 +307,6 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
         };
     }
 
-    @Override
-    public void putAll(Map t) {
-        super.putAll(t);
-    }
-
     @Override
     public Collection values() {
         return new TranslatingCollection(super.values()) {
@@ -238,4 +380,36 @@
             return backingCollection.size();
         }
     }
+
+    private class EntryImpl implements Entry {
+
+        final Object key;
+        final Object val;
+
+        private EntryImpl(Object key, Object val) {
+            this.key = key;
+            this.val = val;
+        }
+
+        public Object getKey() {
+            return key;
+        }
+
+        public Object getValue() {
+            return val;
+        }
+
+        public Object setValue(Object value) {
+            throw new UnsupportedOperationException();
+        }
+    }
+
+    public interface KeyFilter {
+
+        /**
+         * @param key may be null
+         * @return whether or not <code>key</code> should be excluded
+         */
+        public boolean exclude(Object key);
+    }
 }
org/apache/openjpa/lib/util/concurrent/SizedConcurrentHashMap.java

@@ -99,40 +99,6 @@ public class SizedConcurrentHashMap
         return size() >= maxSize;
     }
 
-    public Map.Entry removeRandom() {
-        // this isn't really random, but is concurrent.
-        while (true) {
-            if (size() == 0)
-                return null;
-            Set entries = entrySet();
-            Entry e = (Entry) entries.iterator().next();
-            final Object key = e.getKey();
-            final Object val = e.getValue();
-            if (remove(key) != null)
-                // create a new Entry instance because the ConcurrentHashMap
-                // implementation's one is "live" so does not behave as desired
-                // after removing the entry.
-                return new Entry() {
-                    public Object getKey() {
-                        return key;
-                    }
-
-                    public Object getValue() {
-                        return val;
-                    }
-
-                    public Object setValue(Object value) {
-                        throw new UnsupportedOperationException();
-                    }
-                };
-        }
-    }
-
-    public Iterator randomEntryIterator() {
-        // this isn't really random, but is concurrent.
-        return entrySet().iterator();
-    }
-
     /**
      * This implementation does nothing.
      */
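Deleting these overrides means the calls now resolve to the pseudo-random implementations introduced above in the superclass (NullSafeConcurrentHashMap). For intuition, here is a simplified sketch of how a size-capped subclass can lean on the inherited removeRandom(); the class is hypothetical and the real SizedConcurrentHashMap overflow handling differs in detail:

// Hypothetical subclass sketch, not the committed SizedConcurrentHashMap:
// evict via the inherited pseudo-random removeRandom() when full.
public class BoundedCacheSketch extends NullSafeConcurrentHashMap {

    private final int maxSize;

    public BoundedCacheSketch(int maxSize) {
        this.maxSize = maxSize;
    }

    @Override
    public Object put(Object key, Object value) {
        // make room before inserting the new entry
        while (size() >= maxSize)
            removeRandom();
        return super.put(key, value);
    }
}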
org/apache/openjpa/lib/util/concurrent/TestConcurrentMap.java

@@ -41,7 +41,7 @@ public class TestConcurrentMap extends AbstractTestCase {
     private static final int SLEEP = 3;
 
     private ConcurrentMap[] _maps = new ConcurrentMap[]{
         new SizedConcurrentHashMap(ENTRIES, .75f, 16),
-        new SizedConcurrentHashMap(ENTRIES, .75f, 16),
+        new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD), };
 
     public void setUp() throws Exception {
org/apache/openjpa/lib/util/concurrent/TestNullSafeConcurrentHashMap.java

@@ -19,34 +19,80 @@
 package org.apache.openjpa.lib.util.concurrent;
 
 import java.io.IOException;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.Iterator;
 import java.util.Set;
 import java.util.Collection;
 import java.util.Map;
 import java.util.HashMap;
 import java.util.Enumeration;
+import java.util.Map.Entry;
 
 import org.apache.openjpa.lib.test.AbstractTestCase;
 
 public class TestNullSafeConcurrentHashMap extends AbstractTestCase {
 
-    private Map newMap() {
+    private NullSafeConcurrentHashMap newMap() {
         return new NullSafeConcurrentHashMap();
         // return new HashMap();
     }
 
+    public void testRemoveRandomIsNotTotallyDeterministic() {
+        removeHelper(false);
+    }
+
+    public void testRandomIteratorIsNotTotallyDeterministic() {
+        removeHelper(true);
+    }
+
+    private void removeHelper(boolean iter) {
+        Map<String,Integer> removedCounts = new HashMap();
+        for (int i = 0; i < 1000; i++) {
+            NullSafeConcurrentHashMap m = new NullSafeConcurrentHashMap();
+            m.put("a", "A");
+            m.put("b", "B");
+            m.put("c", "C");
+            m.put("d", "D");
+            m.put("e", "E");
+            m.put("f", "F");
+            m.put("g", "G");
+
+            String removed;
+            if (iter) {
+                removed = (String) ((Entry) m.randomEntryIterator().next())
+                    .getKey();
+                m.remove(removed);
+            } else {
+                removed = (String) m.removeRandom().getKey();
+            }
+
+            Integer count = removedCounts.get(removed);
+            if (count == null)
+                removedCounts.put(removed, 1);
+            else
+                removedCounts.put(removed, count.intValue() + 1);
+        }
+
+        // assume that over 1000 runs, every element should be removed at
+        // least once, and no element should be removed more than 30% of
+        // the time
+        assertEquals(7, removedCounts.size());
+        for (Entry<String,Integer> entry : removedCounts.entrySet()) {
+            if (entry.getValue() == 0)
+                fail("element " + entry.getKey() + " was never removed");
+            if (entry.getValue() > 500)
+                fail("element " + entry.getKey() + " was removed "
+                    + entry.getValue() + " times; this is greater than the "
+                    + "threshold of 500.");
+        }
+    }
+
     public void testNullKeys() throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        helper(m, null, "value 0", "value 1", "value 2");
+        helper(null, "value 0", "value 1", "value 2");
     }
 
-    private void helper(Map m, Object key, Object value0,
+    private void helper(Object key, Object value0,
         Object value1, Object value2)
         throws IOException, ClassNotFoundException {
 
+        NullSafeConcurrentHashMap m = newMap();
+
         // initial put
         m.put(key, value0);
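The 500-removal threshold asserted above is very forgiving: with seven keys and 1000 uniform random removals, the expected count per key is about 143 with a standard deviation near 11, so 500 sits roughly 32 standard deviations out. A quick back-of-the-envelope check (throwaway code, not part of the commit):

public class ThresholdCheck {
    public static void main(String[] args) {
        int runs = 1000, keys = 7, threshold = 500;
        double p = 1.0 / keys;
        double mean = runs * p;                       // ~142.9
        double sigma = Math.sqrt(runs * p * (1 - p)); // ~11.1
        System.out.println("z-score of threshold: "
            + (threshold - mean) / sigma);            // ~32 standard deviations
    }
}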
@@ -77,7 +123,6 @@ public class TestNullSafeConcurrentHashMap extends AbstractTestCase {
         // put
         assertEquals(value0, m.put(key, value1));
-        // m.putAll(); #####
 
         // remove
         assertEquals(value1, m.remove(key));
@@ -85,42 +130,50 @@
         m.put(key, value1);
 
         // ConcurrentMap stuff
-        ConcurrentMap cm = (ConcurrentMap) m;
-        assertFalse(cm.remove("invalid key", value0));
-        assertTrue(cm.remove(key, value1));
-        assertNull(cm.putIfAbsent(key, value0)); // null == prev unset
+        assertFalse(m.remove("invalid key", value0));
+        assertTrue(m.remove(key, value1));
+        assertNull(m.putIfAbsent(key, value0)); // null == prev unset
 
         // value0 might be null; can't disambiguate from above in OpenJPA
         // interpretation
-        assertEquals(value0, cm.putIfAbsent(key, "invalid value"));
+        assertEquals(value0, m.putIfAbsent(key, "invalid value"));
 
         // replace
-        assertEquals(value0, cm.replace(key, value1));
-        assertTrue(cm.replace(key, value1, value2));
+        assertEquals(value0, m.replace(key, value1));
+        assertTrue(m.replace(key, value1, value2));
+
+        // putAll. Note that ConcurrentHashMap happens to delegate to put()
+        // from within putAll() calls. This test should help ensure that we
+        // find out if that changes.
+        m = newMap();
+        Map putAllArg = new HashMap();
+        putAllArg.put(key, value0);
+        putAllArg.put("another key", value1);
+        m.putAll(putAllArg);
+        assertEquals(value0, m.get(key));
+        assertEquals(value1, m.get("another key"));
     }
 
     public void testNullValues() throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        nullValsHelper(m, "foo");
+        nullValsHelper("foo");
    }
 
-    private void nullValsHelper(Map m, Object key)
+    private void nullValsHelper(Object key)
         throws IOException, ClassNotFoundException {
-        helper(m, key, null, null, null);
-        helper(m, key, "bar", "baz", "quux");
+        helper(key, null, null, null);
+        helper(key, "bar", "baz", "quux");
 
-        helper(m, key, "bar", "baz", null);
-        helper(m, key, null, "baz", "quux");
-        helper(m, key, "bar", null, "quux");
+        helper(key, "bar", "baz", null);
+        helper(key, null, "baz", "quux");
+        helper(key, "bar", null, "quux");
 
-        helper(m, key, "bar", null, null);
-        helper(m, key, null, "baz", null);
-        helper(m, key, null, null, "quux");
+        helper(key, "bar", null, null);
+        helper(key, null, "baz", null);
+        helper(key, null, null, "quux");
     }
 
     public void testNullKeysAndValues()
         throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        nullValsHelper(m, null);
+        nullValsHelper(null);
     }
 }