OPENJPA-2441: Remove usage of NullSafeConcurrentHashMap. Patch contributed by Dalia Abo Sheasha.

git-svn-id: https://svn.apache.org/repos/asf/openjpa/trunk@1570261 13f79535-47bb-0310-9956-ffa450edef68
Richard G. Curtis 2014-02-20 16:33:30 +00:00
parent f01f7c4721
commit 6d8a46e825
8 changed files with 108 additions and 362 deletions
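The replacement pattern is the same in both query-statistics classes: the null-safe, size-bounded SizedConcurrentHashMap is swapped for a ConcurrentReferenceHashMap built with hard key and value references and an explicit maximum size. A minimal sketch of that construction, using the same OpenJPA utility classes the diff imports (the constant values below are illustrative, not the project's actual FIXED_SIZE, LOAD_FACTOR, and CONCURRENCY settings):

import java.util.Map;

import org.apache.openjpa.lib.util.ReferenceMap;
import org.apache.openjpa.lib.util.concurrent.ConcurrentReferenceHashMap;

class BoundedStatsMapSketch {
    // Illustrative values; the real constants live in QueryStatistics.Default.
    private static final int FIXED_SIZE = 1000;
    private static final float LOAD_FACTOR = 0.75f;
    private static final int CONCURRENCY = 16;

    @SuppressWarnings("unchecked")
    static <T> Map<T, long[]> newBoundedMap() {
        // Hard references on keys and values give plain concurrent-map semantics;
        // setMaxSize restores the cap that SizedConcurrentHashMap used to provide.
        ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(
                ReferenceMap.HARD, ReferenceMap.HARD, CONCURRENCY, LOAD_FACTOR);
        map.setMaxSize(FIXED_SIZE);
        return map;
    }
}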


@@ -40,9 +40,10 @@ import org.apache.openjpa.lib.conf.Configuration;
import org.apache.openjpa.lib.log.Log;
import org.apache.openjpa.lib.util.J2DoPrivHelper;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.lib.util.ReferenceMap;
import org.apache.openjpa.lib.util.concurrent.AbstractConcurrentEventManager;
import org.apache.openjpa.lib.util.concurrent.ConcurrentReferenceHashMap;
import org.apache.openjpa.lib.util.concurrent.ConcurrentReferenceHashSet;
import org.apache.openjpa.lib.util.concurrent.SizedConcurrentHashMap;
import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.MetaDataRepository;
import org.apache.openjpa.util.Id;
@@ -480,11 +481,27 @@ public abstract class AbstractQueryCache
private long[] astat = new long[ARRAY_SIZE];
private long[] stat = new long[ARRAY_SIZE];
private Map<T, long[]> stats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
private Map<T, long[]> astats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
private Map<T, long[]> stats;
private Map<T, long[]> astats;
private Date start = new Date();
private Date since = start;
public Default() {
initializeMaps();
}
private void initializeMaps() {
ConcurrentReferenceHashMap statsMap =
new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD, CONCURRENCY, LOAD_FACTOR);
statsMap.setMaxSize(FIXED_SIZE);
stats = statsMap;
ConcurrentReferenceHashMap aStatsMap =
new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD, CONCURRENCY, LOAD_FACTOR);
aStatsMap.setMaxSize(FIXED_SIZE);
astats = aStatsMap;
}
public Set<T> keys() {
return stats.keySet();
}
@@ -552,8 +569,7 @@ public abstract class AbstractQueryCache
public synchronized void clear() {
astat = new long[ARRAY_SIZE];
stat = new long[ARRAY_SIZE];
stats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
astats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
initializeMaps();
start = new Date();
since = start;
}


@@ -25,7 +25,8 @@ import java.util.Date;
import java.util.Map;
import java.util.Set;
import org.apache.openjpa.lib.util.concurrent.SizedConcurrentHashMap;
import org.apache.openjpa.lib.util.ReferenceMap;
import org.apache.openjpa.lib.util.concurrent.ConcurrentReferenceHashMap;
/**
* Records query execution statistics.
@@ -153,11 +154,27 @@ public interface QueryStatistics<T> extends Serializable {
private long[] astat = new long[ARRAY_SIZE];
private long[] stat = new long[ARRAY_SIZE];
private Map<T, long[]> stats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
private Map<T, long[]> astats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
private Map<T, long[]> stats;
private Map<T, long[]> astats;
private Date start = new Date();
private Date since = start;
public Default() {
initializeMaps();
}
private void initializeMaps() {
ConcurrentReferenceHashMap statsMap =
new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD, CONCURRENCY, LOAD_FACTOR);
statsMap.setMaxSize(FIXED_SIZE);
stats = statsMap;
ConcurrentReferenceHashMap aStatsMap =
new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD, CONCURRENCY, LOAD_FACTOR);
aStatsMap.setMaxSize(FIXED_SIZE);
astats = aStatsMap;
}
public Set<T> keys() {
return stats.keySet();
}
@@ -216,8 +233,7 @@ public interface QueryStatistics<T> extends Serializable {
public synchronized void clear() {
astat = new long[ARRAY_SIZE];
stat = new long[ARRAY_SIZE];
stats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
astats = new SizedConcurrentHashMap(FIXED_SIZE, LOAD_FACTOR, CONCURRENCY);
initializeMaps();
start = new Date();
since = start;
}


@@ -56,8 +56,8 @@ import org.apache.openjpa.lib.util.Files;
import org.apache.openjpa.lib.util.J2DoPrivHelper;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.lib.util.Options;
import org.apache.openjpa.lib.util.concurrent.NullSafeConcurrentHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import serp.bytecode.BCClass;
@@ -94,8 +94,8 @@ public class ProxyManagerImpl
_stdMaps.put(SortedMap.class, TreeMap.class);
}
private final Set _unproxyable = new HashSet();
private final Map _proxies = new NullSafeConcurrentHashMap();
private final Set<String> _unproxyable = new HashSet<String>();
private final Map<Class<?>, Proxy> _proxies = new ConcurrentHashMap<Class<?>, Proxy>();
private boolean _trackChanges = true;
private boolean _assertType = false;
private boolean _delayedCollectionLoading = false;
@@ -453,20 +453,18 @@ public class ProxyManagerImpl
* Return the cached factory proxy for the given bean type.
*/
private ProxyBean getFactoryProxyBean(Object orig) {
final Class type = orig.getClass();
final Class<?> type = orig.getClass();
if (isUnproxyable(type))
return null;
// we don't lock here; ok if two proxies get generated for same type
ProxyBean proxy = (ProxyBean) _proxies.get(type);
if (proxy == null && !_proxies.containsKey(type)) {
ClassLoader l = GeneratedClasses.getMostDerivedLoader(type,
ProxyBean.class);
Class pcls = loadBuildTimeProxy(type, l);
if (proxy == null) {
ClassLoader l = GeneratedClasses.getMostDerivedLoader(type, ProxyBean.class);
Class<?> pcls = loadBuildTimeProxy(type, l);
if (pcls == null) {
// TODO Move this to J2DoPrivHelper?
BCClass bc = AccessController
.doPrivileged(new PrivilegedAction<BCClass>() {
BCClass bc = AccessController.doPrivileged(new PrivilegedAction<BCClass>() {
public BCClass run() {
return generateProxyBeanBytecode(type, true);
}
@@ -475,10 +473,13 @@ public class ProxyManagerImpl
pcls = GeneratedClasses.loadBCClass(bc, l);
}
if (pcls != null)
proxy = (ProxyBean) instantiateProxy(pcls,
findCopyConstructor(type), new Object[] {orig});
proxy = (ProxyBean) instantiateProxy(pcls, findCopyConstructor(type), new Object[] { orig });
if (proxy == null) {
_unproxyable.add(type.getName());
} else {
_proxies.put(type, proxy);
}
}
return proxy;
}
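Because java.util.concurrent.ConcurrentHashMap rejects null keys and values, the rewritten getFactoryProxyBean can no longer cache a failed lookup as a null entry the way the old NullSafeConcurrentHashMap-backed map allowed; instead a failed generation is recorded in the _unproxyable set and only non-null proxies are put into the map. A minimal sketch of that pattern (not the OpenJPA code; the set implementation and helper names below are placeholders):

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

class ProxyCacheSketch {
    private final Map<Class<?>, Object> proxies = new ConcurrentHashMap<Class<?>, Object>();
    // A concurrent set for the sketch; the real class keeps a plain HashSet field.
    private final Set<String> unproxyable =
            Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());

    Object lookupOrCreate(Class<?> type) {
        Object proxy = proxies.get(type);
        if (proxy != null)
            return proxy;                      // positive result cached in the map
        if (unproxyable.contains(type.getName()))
            return null;                       // known failure; do not retry
        proxy = tryGenerate(type);             // may legitimately return null
        if (proxy == null)
            unproxyable.add(type.getName());   // negative result lives outside the null-hostile map
        else
            proxies.put(type, proxy);          // ConcurrentHashMap requires non-null key and value
        return proxy;
    }

    private Object tryGenerate(Class<?> type) {
        return null;                           // placeholder for the bytecode-generation step
    }
}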


@@ -740,9 +740,36 @@ public class TestProxyManager extends TestCase {
NonproxyableBean orig = new NonproxyableBean(1);
populate(orig);
assertNull(_mgr.copyCustom(orig));
assertNull(_mgr.copyCustom(orig));
assertNull(_mgr.newCustomProxy(orig, true));
}
public void testIsUnproxyable() {
CustomBean validBean = new CustomBean();
populate(validBean);
assertNotNull(_mgr.copyCustom(validBean));
assertNotNull(_mgr.newCustomProxy(validBean, true));
assertFalse(_mgr.isUnproxyable(CustomBean.class));
NonproxyableBean bean1 = new NonproxyableBean(1);
populate(bean1);
NonproxyableBean2 bean2 = new NonproxyableBean2();
populate(bean2);
assertFalse(_mgr.isUnproxyable(NonproxyableBean.class));
assertNull(_mgr.copyCustom(bean1));
assertTrue(_mgr.isUnproxyable(NonproxyableBean.class));
assertNull(_mgr.newCustomProxy(bean1, true));
assertTrue(_mgr.isUnproxyable(NonproxyableBean.class));
assertFalse(_mgr.isUnproxyable(NonproxyableBean2.class));
assertNull(_mgr.newCustomProxy(bean2, true));
assertTrue(_mgr.isUnproxyable(NonproxyableBean2.class));
assertNull(_mgr.copyCustom(bean2));
assertTrue(_mgr.isUnproxyable(NonproxyableBean2.class));
}
/**
* Assert that the given beans are exactly the same.
*/
@@ -948,6 +975,13 @@ public class TestProxyManager extends TestCase {
}
}
/**
* Used to test non-proxyable custom bean handling.
*/
public class NonproxyableBean2 extends CustomBean {
// class is not static
}
/**
* Used to test custom calendar handling.
*/


@@ -31,12 +31,15 @@ import java.util.HashSet;
import org.apache.commons.collections.set.MapBackedSet;
/**
* A subclass of {@link ConcurrentHashMap} that allows null keys and values.
* In exchange, it weakens the contract of {@link #putIfAbsent} and the other
* concurrent methods added in {@link #ConcurrentHashMap}.
* A subclass of {@link ConcurrentHashMap} that allows null keys and values. In exchange, it weakens the contract of
* {@link #putIfAbsent} and the other concurrent methods added in {@link #ConcurrentHashMap}.
*
* @since 1.1.0
* @deprecated In Java 8, java.util.ConcurrentHashMap received an overhaul and this extension was not updated. This
* class will fail to compile on Java 8. If it is compiled at a lower level and run on Java 8 it will not
* work properly. For more information: https://issues.apache.org/jira/browse/OPENJPA-2441
*/
@Deprecated
public class NullSafeConcurrentHashMap extends ConcurrentHashMap {
private enum Markers {
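The surviving hunk above ends at the class's private Markers enum, which hints at how the deprecated class tolerated nulls: a null key or value is swapped for a private sentinel before it reaches the null-hostile ConcurrentHashMap and swapped back on the way out. A minimal sketch of that masking idea (not the actual NullSafeConcurrentHashMap source):

import java.util.concurrent.ConcurrentHashMap;

class NullMaskingMapSketch {
    private enum Markers { NULL }              // sentinel standing in for null keys and values
    private final ConcurrentHashMap<Object, Object> delegate =
            new ConcurrentHashMap<Object, Object>();

    private static Object mask(Object o)   { return o == null ? Markers.NULL : o; }
    private static Object unmask(Object o) { return o == Markers.NULL ? null : o; }

    public Object put(Object key, Object value) {
        return unmask(delegate.put(mask(key), mask(value)));
    }

    public Object get(Object key) {
        return unmask(delegate.get(mask(key)));
    }

    public boolean containsKey(Object key) {
        return delegate.containsKey(mask(key));
    }
}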


@@ -30,7 +30,11 @@ import org.apache.openjpa.lib.util.SizedMap;
* An implementation of {@link SizedMap} that uses JDK1.5 concurrency primitives
*
* @since 1.1.0
* @deprecated In Java 8, java.util.ConcurrentHashMap received an overhaul and this extension was not updated. This
* class will fail to compile on Java 8. If it is compiled at a lower level and run on Java 8 it will not
* work properly. For more information: https://issues.apache.org/jira/browse/OPENJPA-2441
*/
@Deprecated
public class SizedConcurrentHashMap
extends NullSafeConcurrentHashMap
implements SizedMap, ConcurrentMap, Serializable {


@@ -1,149 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.lib.util.concurrent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.openjpa.lib.util.ReferenceMap;
import org.apache.openjpa.lib.test.AbstractTestCase;
/**
* Tests the methods of {@link ConcurrentMap}.
*
* @author Abe White
*/
public class TestConcurrentMap extends AbstractTestCase {
private static final int ENTRIES = 333;
private static final int SLEEP = 3;
private ConcurrentMap[] _maps = new ConcurrentMap[]{
new SizedConcurrentHashMap(ENTRIES, .75f, 16),
new ConcurrentReferenceHashMap(ReferenceMap.HARD, ReferenceMap.HARD), };
public void setUp() throws Exception {
super.setUp();
for (int i = 0; i < ENTRIES; i++) {
for (int j = 0; j < _maps.length; j++) {
int key = j * ENTRIES + i;
_maps[j].put(new Integer(key), "v" + key);
}
}
for (int i = 0; i < _maps.length; i++)
assertEquals(ENTRIES, _maps[i].size());
}
public void testRemoveRandom() {
Set keys = new TreeSet();
for (int i = 0; i < ENTRIES; i++)
for (int j = 0; j < _maps.length; j++)
assertTrue(removeRandom(_maps[j], keys));
postRemoveTest(keys);
}
private static boolean removeRandom(ConcurrentMap map, Set keys) {
Map.Entry rem = map.removeRandom();
return rem != null && rem.getValue().equals("v" + rem.getKey())
&& keys.add(rem.getKey());
}
private void postRemoveTest(Set keys) {
for (int i = 0; i < _maps.length; i++) {
assertTrue(_maps[i].isEmpty());
assertTrue(!_maps[i].containsKey(new Integer(ENTRIES * i + i)));
}
assertEquals(keys.toString(), ENTRIES * _maps.length, keys.size());
}
public synchronized void testRemoveRandomThreaded()
throws InterruptedException {
Set keys = Collections.synchronizedSet(new TreeSet());
RemoveRandomRunnable[] runs =
new RemoveRandomRunnable[ENTRIES * _maps.length];
for (int i = 0; i < ENTRIES; i++)
for (int j = 0; j < _maps.length; j++)
runs[j * ENTRIES + i] = new RemoveRandomRunnable
(_maps[j], keys);
for (int i = 0; i < runs.length; i++)
new Thread(runs[i]).start();
Thread.currentThread().sleep(SLEEP * ENTRIES * _maps.length);
for (int i = 0; i < runs.length; i++) {
assertTrue(String.valueOf(i), !runs[i].error);
if (runs[i].interrupted)
throw new InterruptedException(String.valueOf(i));
}
postRemoveTest(keys);
}
public void testIterate() {
iterationTest(false);
}
private List iterationTest(boolean random) {
Set keys = new TreeSet();
List ordered = new ArrayList(200);
for (int i = 0; i < _maps.length; i++) {
Iterator itr = (random) ? _maps[i].randomEntryIterator()
: _maps[i].entrySet().iterator();
while (itr.hasNext()) {
Map.Entry entry = (Map.Entry) itr.next();
assertEquals("v" + entry.getKey(), entry.getValue());
assertTrue(keys + ":: " + _maps[i].getClass() + "::"
+ entry.getKey() + "::" + entry.getValue(),
keys.add(entry.getKey()));
ordered.add(entry.getKey());
}
}
assertEquals(keys.toString(), ENTRIES * _maps.length, keys.size());
return ordered;
}
public void testRandomIterate() {
iterationTest(true);
}
private static class RemoveRandomRunnable implements Runnable {
public boolean error = false;
public boolean interrupted = false;
private final ConcurrentMap _map;
private final Set _keys;
public RemoveRandomRunnable(ConcurrentMap map, Set keys) {
_map = map;
_keys = keys;
}
public synchronized void run() {
try {
Thread.currentThread().sleep((long) (Math.random() * SLEEP));
} catch (InterruptedException ie) {
interrupted = true;
}
error = !removeRandom(_map, _keys);
}
}
}


@@ -1,179 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.lib.util.concurrent;
import java.io.IOException;
import java.util.Set;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import java.util.Map.Entry;
import org.apache.openjpa.lib.test.AbstractTestCase;
public class TestNullSafeConcurrentHashMap extends AbstractTestCase {
private NullSafeConcurrentHashMap newMap() {
return new NullSafeConcurrentHashMap();
}
public void testRemoveRandomIsNotTotallyDeterministic() {
removeHelper(false);
}
public void testRandomIteratorIsNotTotallyDeterministic() {
removeHelper(true);
}
private void removeHelper(boolean iter) {
Map<String,Integer> removedCounts = new HashMap();
for (int i = 0; i < 1000; i++) {
NullSafeConcurrentHashMap m = new NullSafeConcurrentHashMap();
m.put("a", "A");
m.put("b", "B");
m.put("c", "C");
m.put("d", "D");
m.put("e", "E");
m.put("f", "F");
m.put("g", "G");
String removed;
if (iter) {
removed = (String) m.removeRandom().getKey();
} else {
removed = (String) ((Entry) m.randomEntryIterator().next())
.getKey();
m.remove(removed);
}
Integer count = removedCounts.get(removed);
if (count == null)
removedCounts.put(removed, 1);
else
removedCounts.put(removed, count.intValue() + 1);
}
// assume that over 1000 runs, every element should be removed at
// least once, and no element should be removed more than 30% of
// the time
assertEquals(7, removedCounts.size());
for (Entry<String,Integer> entry : removedCounts.entrySet()) {
if (entry.getValue() == 0)
fail("element " + entry.getKey() + " was never removed");
if (entry.getValue() > 500)
fail("element " + entry.getKey() + " was removed "
+ entry.getValue() + " times; this is greater than the "
+ "threshold of 500.");
}
}
public void testNullKeys() throws ClassNotFoundException, IOException {
helper(null, "value 0", "value 1", "value 2");
}
private void helper(Object key, Object value0,
Object value1, Object value2)
throws IOException, ClassNotFoundException {
NullSafeConcurrentHashMap m = newMap();
// initial put
m.put(key, value0);
// get etc.
assertEquals(value0, m.get(key));
assertTrue(m.containsKey(key));
assertTrue(m.containsValue(value0));
// keySet
Set keys = m.keySet();
assertTrue(keys.contains(key));
assertEquals(1, keys.size());
assertEquals(key, keys.iterator().next());
// entrySet
Set entries = m.entrySet();
Entry e = (Entry) entries.iterator().next();
assertEquals(key, e.getKey());
assertEquals(value0, e.getValue());
// values
Collection values = m.values();
assertEquals(1, values.size());
assertEquals(value0, values.iterator().next());
// serializability
assertEquals(m, roundtrip(m, true));
// put
assertEquals(value0, m.put(key, value1));
// remove
assertEquals(value1, m.put(key, value1));
assertEquals(value1, m.remove(key));
m.put(key, value1);
// ConcurrentMap stuff
assertFalse(m.remove("invalid key", value0));
assertTrue(m.remove(key, value1));
assertNull(m.putIfAbsent(key, value0)); // null == prev unset
// value0 might be null; can't disambiguate from above in OpenJPA
// interpretation
assertEquals(value0, m.putIfAbsent(key, "invalid value"));
// replace
assertEquals(value0, m.replace(key, value1));
assertTrue(m.replace(key, value1, value2));
// putAll. Note that ConcurrentHashMap happens to delegate to put()
// from within putAll() calls. This test should help ensure that we
// find out if that changes.
m = newMap();
Map putAllArg = new HashMap();
putAllArg.put(key, value0);
putAllArg.put("another key", value1);
m.putAll(putAllArg);
assertEquals(value0, m.get(key));
assertEquals(value1, m.get("another key"));
}
public void testNullValues() throws ClassNotFoundException, IOException {
nullValsHelper("foo");
}
private void nullValsHelper(Object key)
throws IOException, ClassNotFoundException {
helper(key, null, null, null);
helper(key, "bar", "baz", "quux");
helper(key, "bar", "baz", null);
helper(key, null, "baz", "quux");
helper(key, "bar", null, "quux");
helper(key, "bar", null, null);
helper(key, null, "baz", null);
helper(key, null, null, "quux");
}
public void testNullKeysAndValues()
throws ClassNotFoundException, IOException {
nullValsHelper(null);
}
}