Calculate the threshold before putting data during deserialization, to avoid a memory issue

bug 34265, reported by Marcos Cesar de Oliveira

git-svn-id: https://svn.apache.org/repos/asf/jakarta/commons/proper/collections/trunk@169102 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Stephen Colebourne 2005-05-07 18:04:32 +00:00
parent d9796b6160
commit e52c4c50e1
2 changed files with 2 additions and 1 deletion

View File

@@ -64,6 +64,7 @@ If this causes major headaches to anyone please contact commons-dev at jakarta.a
<li>AbstractLinkedMap.init() - Now calls createEntry() to create the map entry object [33706]</li>
<li>BeanMap.initialize() - Internal variable now correctly initialised with only write methods that actually exist [15895]</li>
<li>TransformedMap.putAll - Now allows putAll of an empty map [34686]</li>
+<li>AbstractHashedMap deserialization - Fix to prevent doubling of internal data array [34265]</li>
</ul>
<center><h3>JAVADOC</h3></center>

View File

@@ -1204,13 +1204,13 @@ public class AbstractHashedMap extends AbstractMap implements IterableMap {
int capacity = in.readInt();
int size = in.readInt();
init();
+threshold = calculateThreshold(capacity, loadFactor);
data = new HashEntry[capacity];
for (int i = 0; i < size; i++) {
Object key = in.readObject();
Object value = in.readObject();
put(key, value);
}
-threshold = calculateThreshold(data.length, loadFactor);
}
//-----------------------------------------------------------------------