mirror of https://github.com/apache/openjpa.git
OPENJPA-544. Merge from ../active. svn merge -c 652523 ../active
git-svn-id: https://svn.apache.org/repos/asf/openjpa/trunk@666897 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 6f6634bbad
commit 8db8e5302e

Index: NullSafeConcurrentHashMap.java
===================================================================
@@ -19,13 +19,17 @@
 package org.apache.openjpa.lib.util.concurrent;
 
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.Map;
 import java.util.Enumeration;
 import java.util.Set;
 import java.util.Collection;
 import java.util.AbstractSet;
 import java.util.Iterator;
 import java.util.AbstractCollection;
+import java.util.Random;
+import java.util.HashSet;
+import java.util.TreeSet;
+
+import org.apache.commons.collections.set.MapBackedSet;
 
 /**
  * A subclass of {@link ConcurrentHashMap} that allows null keys and values.
@@ -36,10 +40,20 @@ import java.util.AbstractCollection;
  */
 public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
 
-    private enum Null {
-        MARKER
+    private enum Markers {
+        NULL,
+        MAP_BACKED_SET_DUMMY_VAL
     }
 
+    // The second argument is used within MapBackedSet as the value for
+    // all the key-val pairs that are put into the underlying Map. This
+    // is required for our usage since ConcurrentHashMap does not allow
+    // null values.
+    private Set randomKeys = MapBackedSet.decorate(
+        new ConcurrentHashMap(), Markers.MAP_BACKED_SET_DUMMY_VAL);
+
+    private Random random = new Random();
+
     public NullSafeConcurrentHashMap(int size, float load,
         int concurrencyLevel) {
         super(size, load, concurrencyLevel);
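
For context, MapBackedSet comes from Apache Commons Collections 3 and exposes the key set of an arbitrary Map as a Set; the two-argument decorate() supplies the value stored for every key, which matters here because ConcurrentHashMap rejects null values. A minimal standalone sketch of that API (the Dummy enum and class name are illustrative stand-ins, mirroring Markers.MAP_BACKED_SET_DUMMY_VAL above):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import org.apache.commons.collections.set.MapBackedSet;

    public class MapBackedSetSketch {
        // Any non-null marker object works; it becomes the stored value
        // for every key placed into the backing map.
        private enum Dummy { VAL }

        public static void main(String[] args) {
            // ConcurrentHashMap disallows null values, so a non-null
            // dummy value must be passed to decorate().
            Set keys = MapBackedSet.decorate(new ConcurrentHashMap(), Dummy.VAL);
            keys.add("a");
            keys.add("b");
            System.out.println(keys.contains("a")); // true
            System.out.println(keys.remove("b"));   // true
        }
    }
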
@@ -52,24 +66,135 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
      * Returns internal representation for object.
      */
     private static Object maskNull(Object o) {
-        return (o == null ? Null.MARKER : o);
+        return (o == null ? Markers.NULL : o);
     }
 
     /**
      * Returns object represented by specified internal representation.
      */
     private static Object unmaskNull(Object o) {
-        return (o == Null.MARKER ? null : o);
+        return (o == Markers.NULL ? null : o);
+    }
+
+    public Entry removeRandom() {
+        // this doesn't just use randomEntryIterator() because that iterator
+        // has weaker concurrency guarantees than this method. In particular,
+        // this method will continue to attempt to remove random entries even
+        // as other threads remove the same entries, whereas the random
+        // iterator may return values that have been removed.
+
+        while (!isEmpty()) {
+            while (!randomKeys.isEmpty()) {
+                // randomKeys contains null-masked data
+                Iterator iter = randomKeys.iterator();
+                Object key = iter.next();
+                if (key != null && randomKeys.remove(key)) {
+                    Object val = super.remove(key);
+                    if (val != null)
+                        return new EntryImpl(unmaskNull(key), unmaskNull(val));
+                }
+            }
+
+            // if randomKeys is empty, fall back to non-random behavior.
+            Object key = super.keySet().iterator().next();
+            Object val = super.remove(key);
+            if (val != null)
+                return new EntryImpl(unmaskNull(key), unmaskNull(val));
+        }
+        return null;
+    }
+
+    /**
+     * The returned data structure should not be shared among multiple
+     * threads.
+     */
+    public Iterator<Entry> randomEntryIterator() {
+        return new Iterator<Entry>() {
+
+            Iterator randomIter = randomKeys.iterator();
+            Iterator nonRandomIter = NullSafeConcurrentHashMap.super.keySet()
+                .iterator();
+
+            Set returned = new HashSet();
+            Entry next;
+            boolean nextSet = false;
+
+            public boolean hasNext() {
+                // we've set the next value and we haven't returned it yet
+                if (nextSet)
+                    return true;
+
+                // compute the next value. If the computation returns null,
+                // return false. Else, store the next value and return true.
+                Object nextKey;
+                Object nextValue;
+                if (randomIter.hasNext()) {
+                    nextKey = randomIter.next();
+                    nextValue = NullSafeConcurrentHashMap.super.get(nextKey);
+                    if (nextValue != null) {
+                        returned.add(nextKey);
+                        next = new EntryImpl(unmaskNull(nextKey),
+                            unmaskNull(nextValue));
+                        nextSet = true;
+                        return true;
+                    }
+                }
+
+                while (nonRandomIter.hasNext()) {
+                    nextKey = nonRandomIter.next();
+
+                    if (returned.contains(nextKey))
+                        continue;
+
+                    nextValue = NullSafeConcurrentHashMap.super.get(nextKey);
+                    if (nextValue != null) {
+                        returned.add(nextKey);
+                        next = new EntryImpl(unmaskNull(nextKey),
+                            unmaskNull(nextValue));
+                        nextSet = true;
+                        return true;
+                    }
+                }
+                return false;
+            }
+
+            public Entry next() {
+                // hasNext() will initialize this.next
+                if (!nextSet && !hasNext())
+                    return null;
+
+                // if we get here, then we're about to return a next value
+                nextSet = false;
+
+                if (containsKey(next.getKey()))
+                    return next;
+
+                // something has changed since the last iteration (presumably
+                // due to multi-threaded access to the underlying data
+                // structure); recurse
+                return next();
+            }
+
+            public void remove() {
+                throw new UnsupportedOperationException();
+            }
+        };
     }
 
     @Override
     public Object remove(Object key) {
-        return unmaskNull(super.remove(maskNull(key)));
+        Object maskedKey = maskNull(key);
+        Object val = unmaskNull(super.remove(maskedKey));
+        randomKeys.remove(maskedKey);
+        return val;
     }
 
     @Override
     public boolean remove(Object key, Object value) {
-        return super.remove(maskNull(key), maskNull(value));
+        Object maskedKey = maskNull(key);
+        boolean val = super.remove(maskedKey, maskNull(value));
+        randomKeys.remove(maskedKey);
+        return val;
     }
 
     @Override
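
The maskNull/unmaskNull pair above is the core trick that lets a ConcurrentHashMap, which rejects nulls, appear to store null keys and values: nulls are swapped for a private sentinel on the way in and swapped back on the way out. A self-contained sketch of the round trip, using a local Markers enum that mirrors the one in the patch:

    import java.util.concurrent.ConcurrentHashMap;

    public class NullMaskSketch {
        private enum Markers { NULL }

        private static Object maskNull(Object o) {
            return (o == null ? Markers.NULL : o);
        }

        private static Object unmaskNull(Object o) {
            return (o == Markers.NULL ? null : o);
        }

        public static void main(String[] args) {
            ConcurrentHashMap map = new ConcurrentHashMap();
            // map.put(null, null) would throw NullPointerException;
            // masking substitutes a sentinel the map will accept.
            map.put(maskNull(null), maskNull(null));
            Object val = unmaskNull(map.get(maskNull(null)));
            System.out.println(val); // prints "null": round-tripped intact
        }
    }
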
@@ -85,12 +210,34 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
 
     @Override
     public Object putIfAbsent(Object key, Object value) {
-        return unmaskNull(super.putIfAbsent(maskNull(key), maskNull(value)));
+        Object maskedKey = maskNull(key);
+        Object superVal = super.putIfAbsent(maskedKey, maskNull(value));
+        addRandomKey(maskedKey);
+        return unmaskNull(superVal);
     }
 
     @Override
     public Object put(Object key, Object value) {
-        return unmaskNull(super.put(maskNull(key), maskNull(value)));
+        Object maskedKey = maskNull(key);
+        Object superVal = super.put(maskedKey, maskNull(value));
+        addRandomKey(maskedKey);
+        return unmaskNull(superVal);
+    }
+
+    /**
+     * Potentially adds <code>maskedKey</code> to the set of random keys
+     * to be removed by {@link #removeRandom()}.
+     *
+     * @since 1.1.0
+     */
+    private void addRandomKey(Object maskedKey) {
+        // Add one in every three keys to the set. Only do this when
+        // there are less than 16 elements in the random key set; this
+        // means that the algorithm will be pseudo-random for up to
+        // 16 removes (either via removeRandom() or normal remove()
+        // calls) that have no intervening put() calls.
+        if (random != null && randomKeys.size() < 16 && random.nextInt(10) < 3)
+            randomKeys.add(maskedKey);
     }
 
     @Override
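
The addRandomKey() comment claims roughly one key in three is sampled. That follows from Random.nextInt(10) being uniform over 0..9, so the test "< 3" succeeds with probability 0.3. A quick standalone check of the rate (illustrative class name):

    import java.util.Random;

    public class SamplingRateSketch {
        public static void main(String[] args) {
            Random random = new Random();
            int sampled = 0;
            int trials = 100000;
            // nextInt(10) returns 0-9 uniformly; "< 3" selects 3 of the
            // 10 outcomes, i.e. about 30% of candidate keys.
            for (int i = 0; i < trials; i++)
                if (random.nextInt(10) < 3)
                    sampled++;
            System.out.println((double) sampled / trials); // ~0.3
        }
    }
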
@@ -160,11 +307,6 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
         };
     }
 
-    @Override
-    public void putAll(Map t) {
-        super.putAll(t);
-    }
-
     @Override
     public Collection values() {
         return new TranslatingCollection(super.values()) {
@@ -238,4 +380,36 @@ public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
             return backingCollection.size();
         }
     }
+
+    private class EntryImpl implements Entry {
+
+        final Object key;
+        final Object val;
+
+        private EntryImpl(Object key, Object val) {
+            this.key = key;
+            this.val = val;
+        }
+
+        public Object getKey() {
+            return key;
+        }
+
+        public Object getValue() {
+            return val;
+        }
+
+        public Object setValue(Object value) {
+            throw new UnsupportedOperationException();
+        }
+    }
+
+    public interface KeyFilter {
+
+        /**
+         * @param key may be null
+         * @return whether or not <code>key</code> should be excluded
+         */
+        public boolean exclude(Object key);
+    }
 }
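
Taken together, the patched class behaves like a ConcurrentHashMap that tolerates nulls and can surrender pseudo-randomly chosen entries. A usage sketch (same package assumed; it also relies on the class's pre-existing null-safe get(), which lies outside the hunks shown):

    import java.util.Iterator;
    import java.util.Map.Entry;

    public class NullSafeUsageSketch {
        public static void main(String[] args) {
            NullSafeConcurrentHashMap m = new NullSafeConcurrentHashMap();
            m.put(null, "value for null key"); // plain ConcurrentHashMap would throw
            m.put("key", null);                // null values are masked too
            System.out.println(m.get(null));   // value for null key

            Entry removed = m.removeRandom();  // pseudo-randomly chosen entry
            System.out.println(removed.getKey() + "=" + removed.getValue());

            // iterate what is left; per the javadoc above, do not share
            // this iterator across threads
            for (Iterator<Entry> it = m.randomEntryIterator(); it.hasNext();) {
                Entry e = it.next();
                System.out.println(e.getKey() + "=" + e.getValue());
            }
        }
    }
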
Index: SizedConcurrentHashMap.java
===================================================================
@@ -99,40 +99,6 @@ public class SizedConcurrentHashMap
         return size() >= maxSize;
     }
 
-    public Map.Entry removeRandom() {
-        // this isn't really random, but is concurrent.
-        while (true) {
-            if (size() == 0)
-                return null;
-            Set entries = entrySet();
-            Entry e = (Entry) entries.iterator().next();
-            final Object key = e.getKey();
-            final Object val = e.getValue();
-            if (remove(key) != null)
-                // create a new Entry instance because the ConcurrentHashMap
-                // implementation's one is "live" so does not behave as desired
-                // after removing the entry.
-                return new Entry() {
-                    public Object getKey() {
-                        return key;
-                    }
-
-                    public Object getValue() {
-                        return val;
-                    }
-
-                    public Object setValue(Object value) {
-                        throw new UnsupportedOperationException();
-                    }
-                };
-        }
-    }
-
-    public Iterator randomEntryIterator() {
-        // this isn't really random, but is concurrent.
-        return entrySet().iterator();
-    }
-
     /**
      * This implementation does nothing.
      */
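
The comment in the removed code points at a real pitfall: ConcurrentHashMap hands out live entry views, so reading an entry after its mapping is removed is unreliable, which is why the code snapshots key and value into a fresh Entry first. On Java 6 and later the same snapshot can be taken with AbstractMap.SimpleImmutableEntry instead of a hand-rolled anonymous class (a sketch, not the original code):

    import java.util.AbstractMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class EntrySnapshotSketch {
        public static void main(String[] args) {
            Map<String, String> map = new ConcurrentHashMap<String, String>();
            map.put("k", "v");

            Map.Entry<String, String> live = map.entrySet().iterator().next();
            // Copy key and value out of the live entry *before* removing
            // the mapping; afterwards the live view may no longer reflect it.
            Map.Entry<String, String> snapshot =
                new AbstractMap.SimpleImmutableEntry<String, String>(
                    live.getKey(), live.getValue());

            map.remove(snapshot.getKey());
            System.out.println(snapshot); // k=v, independent of the map
        }
    }
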
Index: TestNullSafeConcurrentHashMap.java
===================================================================
@@ -19,34 +19,80 @@
 package org.apache.openjpa.lib.util.concurrent;
 
 import java.io.IOException;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.Iterator;
 import java.util.Set;
 import java.util.Collection;
 import java.util.Map;
 import java.util.HashMap;
-import java.util.Enumeration;
 import java.util.Map.Entry;
 
 import org.apache.openjpa.lib.test.AbstractTestCase;
 
 public class TestNullSafeConcurrentHashMap extends AbstractTestCase {
 
-    private Map newMap() {
+    private NullSafeConcurrentHashMap newMap() {
         return new NullSafeConcurrentHashMap();
-        // return new HashMap();
     }
 
+    public void testRemoveRandomIsNotTotallyDeterministic() {
+        removeHelper(false);
+    }
+
+    public void testRandomIteratorIsNotTotallyDeterministic() {
+        removeHelper(true);
+    }
+
+    private void removeHelper(boolean iter) {
+        Map<String,Integer> removedCounts = new HashMap();
+        for (int i = 0; i < 1000; i++) {
+            NullSafeConcurrentHashMap m = new NullSafeConcurrentHashMap();
+            m.put("a", "A");
+            m.put("b", "B");
+            m.put("c", "C");
+            m.put("d", "D");
+            m.put("e", "E");
+            m.put("f", "F");
+            m.put("g", "G");
+
+            String removed;
+            if (iter) {
+                removed = (String) m.removeRandom().getKey();
+            } else {
+                removed = (String) ((Entry) m.randomEntryIterator().next())
+                    .getKey();
+                m.remove(removed);
+            }
+
+            Integer count = removedCounts.get(removed);
+            if (count == null)
+                removedCounts.put(removed, 1);
+            else
+                removedCounts.put(removed, count.intValue() + 1);
+        }
+
+        // assume that over 1000 runs, every element should be removed at
+        // least once, and no element should be removed more than 30% of
+        // the time
+        assertEquals(7, removedCounts.size());
+        for (Entry<String,Integer> entry : removedCounts.entrySet()) {
+            if (entry.getValue() == 0)
+                fail("element " + entry.getKey() + " was never removed");
+            if (entry.getValue() > 500)
+                fail("element " + entry.getKey() + " was removed "
+                    + entry.getValue() + " times; this is greater than the "
+                    + "threshold of 500.");
+        }
+    }
+
     public void testNullKeys() throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        helper(m, null, "value 0", "value 1", "value 2");
+        helper(null, "value 0", "value 1", "value 2");
     }
 
-    private void helper(Map m, Object key, Object value0,
+    private void helper(Object key, Object value0,
         Object value1, Object value2)
         throws IOException, ClassNotFoundException {
 
+        NullSafeConcurrentHashMap m = newMap();
+
         // initial put
         m.put(key, value0);
 
@@ -77,7 +123,6 @@ public class TestNullSafeConcurrentHashMap extends AbstractTestCase {
 
         // put
         assertEquals(value0, m.put(key, value1));
-        // m.putAll(); #####
 
         // remove
         assertEquals(value1, m.put(key, value1));
@@ -85,42 +130,50 @@
         m.put(key, value1);
 
         // ConcurrentMap stuff
-        ConcurrentMap cm = (ConcurrentMap) m;
-        assertFalse(cm.remove("invalid key", value0));
-        assertTrue(cm.remove(key, value1));
-        assertNull(cm.putIfAbsent(key, value0)); // null == prev unset
+        assertFalse(m.remove("invalid key", value0));
+        assertTrue(m.remove(key, value1));
+        assertNull(m.putIfAbsent(key, value0)); // null == prev unset
 
         // value0 might be null; can't disambiguate from above in OpenJPA
         // interpretation
-        assertEquals(value0, cm.putIfAbsent(key, "invalid value"));
+        assertEquals(value0, m.putIfAbsent(key, "invalid value"));
 
         // replace
-        assertEquals(value0, cm.replace(key, value1));
-        assertTrue(cm.replace(key, value1, value2));
+        assertEquals(value0, m.replace(key, value1));
+        assertTrue(m.replace(key, value1, value2));
+
+        // putAll. Note that ConcurrentHashMap happens to delegate to put()
+        // from within putAll() calls. This test should help ensure that we
+        // find out if that changes.
+        m = newMap();
+        Map putAllArg = new HashMap();
+        putAllArg.put(key, value0);
+        putAllArg.put("another key", value1);
+        m.putAll(putAllArg);
+        assertEquals(value0, m.get(key));
+        assertEquals(value1, m.get("another key"));
     }
 
     public void testNullValues() throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        nullValsHelper(m, "foo");
+        nullValsHelper("foo");
     }
 
-    private void nullValsHelper(Map m, Object key)
+    private void nullValsHelper(Object key)
         throws IOException, ClassNotFoundException {
-        helper(m, key, null, null, null);
-        helper(m, key, "bar", "baz", "quux");
+        helper(key, null, null, null);
+        helper(key, "bar", "baz", "quux");
 
-        helper(m, key, "bar", "baz", null);
-        helper(m, key, null, "baz", "quux");
-        helper(m, key, "bar", null, "quux");
+        helper(key, "bar", "baz", null);
+        helper(key, null, "baz", "quux");
+        helper(key, "bar", null, "quux");
 
-        helper(m, key, "bar", null, null);
-        helper(m, key, null, "baz", null);
-        helper(m, key, null, null, "quux");
+        helper(key, "bar", null, null);
+        helper(key, null, "baz", null);
+        helper(key, null, null, "quux");
     }
 
     public void testNullKeysAndValues()
         throws ClassNotFoundException, IOException {
-        Map m = newMap();
-        nullValsHelper(m, null);
+        nullValsHelper(null);
     }
 }
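
The new putAll coverage leans on the implementation detail called out in the comment: at the time, ConcurrentHashMap.putAll() looped over its argument and called put() for each entry, so the null-masking put() override was exercised transitively. A small probe (an illustrative sketch, not part of the commit) makes that delegation observable; on a JDK where putAll() bypasses put(), it prints 0, which is exactly the kind of change the test is meant to catch:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class PutAllDelegationProbe {
        public static void main(String[] args) {
            final int[] puts = {0};
            Map m = new ConcurrentHashMap() {
                @Override
                public Object put(Object key, Object value) {
                    puts[0]++; // counts calls that reach put()
                    return super.put(key, value);
                }
            };
            Map arg = new HashMap();
            arg.put("a", "A");
            arg.put("b", "B");
            m.putAll(arg);
            // 2 if putAll() delegates to put(), 0 if it writes directly.
            System.out.println("put() called " + puts[0] + " times");
        }
    }
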