BAEL-4686: Review comments implemented

Amitabh Tiwari 2020-11-30 07:12:58 +05:30
parent 2a1baae599
commit 074d016b64
5 changed files with 101 additions and 227 deletions

View File

@@ -15,7 +15,11 @@
    </parent>
    <dependencies>
        <dependency>
            <groupId>org.openjdk.jmh</groupId>
            <artifactId>jmh-core</artifactId>
            <version>${jmh-core.version}</version>
        </dependency>
    </dependencies>
    <properties>

View File

@@ -0,0 +1,96 @@
package com.baeldung.map.concurrenthashmap;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;

@Fork(5)
@Threads(10)
@Warmup(iterations = 5)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public class MapPerformanceComparison {

    // number of operations performed per benchmark invocation, initialised in setup()
    private int TEST_NO_ITEMS;

    public static void main(String[] args) throws Exception {
        // delegates to the JMH launcher; with no arguments it runs every benchmark it finds
        org.openjdk.jmh.Main.main(args);
    }

    @Setup
    public void setup() {
        TEST_NO_ITEMS = 1000;
    }

    @Benchmark
    public void randomReadAndWriteSynchronizedMap() {
        Map<String, Integer> map = Collections.synchronizedMap(new HashMap<String, Integer>());
        performReadAndWriteTest(map);
    }

    @Benchmark
    public void randomReadAndWriteConcurrentHashMap() {
        Map<String, Integer> map = new ConcurrentHashMap<>();
        performReadAndWriteTest(map);
    }

    // mixed workload: a get() followed by a put() on a random key per iteration
    private void performReadAndWriteTest(final Map<String, Integer> map) {
        for (int i = 0; i < TEST_NO_ITEMS; i++) {
            Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
            map.get(String.valueOf(randNumber));
            map.put(String.valueOf(randNumber), randNumber);
        }
    }

    @Benchmark
    public void randomWriteSynchronizedMap() {
        Map<String, Integer> map = Collections.synchronizedMap(new HashMap<String, Integer>());
        performWriteTest(map);
    }

    @Benchmark
    public void randomWriteConcurrentHashMap() {
        Map<String, Integer> map = new ConcurrentHashMap<>();
        performWriteTest(map);
    }

    // write-only workload: put() on a random key per iteration
    private void performWriteTest(final Map<String, Integer> map) {
        for (int i = 0; i < TEST_NO_ITEMS; i++) {
            Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
            map.put(String.valueOf(randNumber), randNumber);
        }
    }

    @Benchmark
    public void randomReadSynchronizedMap() {
        Map<String, Integer> map = Collections.synchronizedMap(new HashMap<String, Integer>());
        performReadTest(map);
    }

    @Benchmark
    public void randomReadConcurrentHashMap() {
        Map<String, Integer> map = new ConcurrentHashMap<>();
        performReadTest(map);
    }

    // read-only workload: get() on a random key per iteration
    private void performReadTest(final Map<String, Integer> map) {
        for (int i = 0; i < TEST_NO_ITEMS; i++) {
            Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
            map.get(String.valueOf(randNumber));
        }
    }
}
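
Note: the class above delegates to org.openjdk.jmh.Main, so the benchmarks can be launched from the command line. As an alternative, a run can be scoped to just this class through the JMH Runner API. The following is a minimal sketch, not part of this commit; the class name MapBenchmarkRunner and the single fork are illustrative choices.

package com.baeldung.map.concurrenthashmap;

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class MapBenchmarkRunner {

    public static void main(String[] args) throws RunnerException {
        // restrict the run to the benchmarks defined in MapPerformanceComparison
        Options options = new OptionsBuilder()
            .include(MapPerformanceComparison.class.getSimpleName())
            .forks(1) // run a single fork locally instead of the @Fork(5) configured on the class
            .build();
        new Runner(options).run();
    }
}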

View File

@@ -1,41 +0,0 @@
package com.baeldung.map.concurrenthashmap;

import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;

import org.junit.Assert;
import org.junit.Test;

public class ConcurrentModificationErrorTest {

    @Test(expected = ConcurrentModificationException.class)
    public void whenRemoveAndAddOnHashMap_thenConcurrentModificationError() {
        Map<Integer, String> map = new HashMap<>();
        map.put(1, "baeldung");
        map.put(2, "HashMap");
        Map<Integer, String> synchronizedMap = Collections.synchronizedMap(map);
        Iterator<Entry<Integer, String>> iterator = synchronizedMap.entrySet().iterator();
        while (iterator.hasNext()) {
            synchronizedMap.put(4, "Modification");
            iterator.next();
        }
    }

    public void whenRemoveAndAddOnConcurrentHashMap_thenNoError() {
        Map<Integer, String> map = new ConcurrentHashMap<>();
        map.put(1, "baeldung");
        map.put(2, "HashMap");
        Iterator<Entry<Integer, String>> iterator = map.entrySet().iterator();
        while (iterator.hasNext()) {
            map.put(4, "Modification");
            iterator.next();
        }
        Assert.assertEquals(4, map.size());
    }
}
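
Note: the removed test above shows that an iterator over Collections.synchronizedMap stays fail-fast. The usual remedy, per the Collections.synchronizedMap Javadoc, is to hold the map's monitor for the whole traversal; ConcurrentHashMap needs no such block because its iterators are weakly consistent. A minimal sketch, not part of this commit (the class name is illustrative):

package com.baeldung.map.concurrenthashmap;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapIterationExample {

    public static void main(String[] args) {
        Map<Integer, String> syncMap = Collections.synchronizedMap(new HashMap<>());
        syncMap.put(1, "baeldung");
        syncMap.put(2, "HashMap");

        // iteration must happen inside a block synchronized on the map itself;
        // otherwise a concurrent writer can still cause a ConcurrentModificationException
        synchronized (syncMap) {
            for (Map.Entry<Integer, String> entry : syncMap.entrySet()) {
                System.out.println(entry.getKey() + " -> " + entry.getValue());
            }
        }
    }
}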

View File

@@ -1,27 +0,0 @@
package com.baeldung.map.concurrenthashmap;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.junit.Assert;
import org.junit.Test;

public class NullAllowInMapTest {

    @Test
    public void allowOnlyNull_In_SynchronizedMap() {
        Map<String, Integer> map = Collections.synchronizedMap(new HashMap<String, Integer>());
        map.put(null, 1);
        Assert.assertTrue(map.get(null).equals(1));
    }

    @Test(expected = NullPointerException.class)
    public void allowOnlyNull_In_ConcurrentHasMap() {
        Map<String, Integer> map = new ConcurrentHashMap<>();
        map.put(null, 1);
    }
}
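
Note: since ConcurrentHashMap rejects null keys and values (as the removed test above demonstrates), absent entries are typically handled with the Java 8 Map default methods instead of null checks. A minimal sketch, not part of this commit (class and key names are illustrative):

package com.baeldung.map.concurrenthashmap;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class NullFreeLookupExample {

    public static void main(String[] args) {
        Map<String, Integer> map = new ConcurrentHashMap<>();

        // getOrDefault returns a fallback value instead of null for a missing key
        int hits = map.getOrDefault("pageHits", 0);

        // computeIfAbsent creates the mapping atomically on first access
        int counter = map.computeIfAbsent("pageHits", key -> 0);

        System.out.println(hits + " " + counter);
    }
}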

View File

@@ -1,158 +0,0 @@
package com.baeldung.map.concurrenthashmap;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.junit.Assert;
import org.junit.Test;

public class PerformanceTest {

    public final static int THREAD_POOL_SIZE = 5;
    public final static int TEST_ITERATIONS = 5;
    public final static int TEST_NO_ITEMS = 10000;

    @Test
    public void randomReadAndWritePerformaceTest_ConcurrentHashMap_faster() throws InterruptedException {
        // For synchronizedMap
        Long totalTimeForSynchronizedMap = 0l;
        Map<String, Integer> slowerMap = Collections.synchronizedMap(new HashMap<String, Integer>());
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForSynchronizedMap += performReadAndWriteTest(slowerMap);
        }
        Long avgTimeForSynchronizedMap = totalTimeForSynchronizedMap / TEST_ITERATIONS;

        // For ConcurrentHashMap Object
        Long totalTimeForConcurrentHashMap = 0l;
        Map<String, Integer> fasterMap = new ConcurrentHashMap<>();
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForConcurrentHashMap += performReadAndWriteTest(fasterMap);
        }
        Long avgTimeForConcurrentHashMap = totalTimeForConcurrentHashMap / TEST_ITERATIONS;

        Assert.assertTrue(avgTimeForSynchronizedMap > avgTimeForConcurrentHashMap);
    }

    private long performReadAndWriteTest(final Map<String, Integer> map) throws InterruptedException {
        long startTime = System.nanoTime();
        ExecutorService exectures = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
        for (int j = 0; j < THREAD_POOL_SIZE; j++) {
            exectures.execute(new Runnable() {
                @Override
                public void run() {
                    for (int i = 0; i < TEST_NO_ITEMS; i++) {
                        Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
                        map.get(String.valueOf(randNumber));
                        map.put(String.valueOf(randNumber), randNumber);
                    }
                }
            });
        }
        exectures.shutdown();
        exectures.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        long entTime = System.nanoTime();
        return (entTime - startTime) / 1000000L;
    }

    @Test
    public void randomWritePerformaceTest_ConcurrentHashMap_faster() throws InterruptedException {
        // For synchronizedMap
        Long totalTimeForSynchronizedMap = 0l;
        Map<String, Integer> slowerMap = Collections.synchronizedMap(new HashMap<String, Integer>());
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForSynchronizedMap += performWriteTest(slowerMap);
        }
        Long avgTimeForSynchronizedMap = totalTimeForSynchronizedMap / TEST_ITERATIONS;

        // For ConcurrentHashMap Object
        Long totalTimeForConcurrentHashMap = 0l;
        Map<String, Integer> fasterMap = new ConcurrentHashMap<>();
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForConcurrentHashMap += performWriteTest(fasterMap);
        }
        Long avgTimeForConcurrentHashMap = totalTimeForConcurrentHashMap / TEST_ITERATIONS;

        Assert.assertTrue(avgTimeForSynchronizedMap > avgTimeForConcurrentHashMap);
    }

    private long performWriteTest(final Map<String, Integer> map) throws InterruptedException {
        long startTime = System.nanoTime();
        ExecutorService exectures = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
        for (int j = 0; j < THREAD_POOL_SIZE; j++) {
            exectures.execute(new Runnable() {
                @Override
                public void run() {
                    for (int i = 0; i < TEST_NO_ITEMS; i++) {
                        Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
                        map.put(String.valueOf(randNumber), randNumber);
                    }
                }
            });
        }
        exectures.shutdown();
        exectures.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        long entTime = System.nanoTime();
        return (entTime - startTime) / 1000000L;
    }

    @Test
    public void randomReadPerformaceTest_ConcurrentHashMap_faster() throws InterruptedException {
        Map<String, Integer> slowerMap = Collections.synchronizedMap(addItems(new HashMap<String, Integer>()));
        // For synchronizedMap
        Long totalTimeForSynchronizedMap = 0l;
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForSynchronizedMap += performReadTest(slowerMap);
        }
        Long avgTimeForSynchronizedMap = totalTimeForSynchronizedMap / TEST_ITERATIONS;

        Map<String, Integer> fasterMap = addItems(new ConcurrentHashMap<String, Integer>());
        // For ConcurrentHashMap Object
        Long totalTimeForConcurrentHashMap = 0l;
        new ConcurrentHashMap<>();
        for (int i = 0; i < TEST_ITERATIONS; i++) {
            totalTimeForConcurrentHashMap += performReadTest(fasterMap);
        }
        Long avgTimeForConcurrentHashMap = totalTimeForConcurrentHashMap / TEST_ITERATIONS;

        Assert.assertTrue(avgTimeForSynchronizedMap > avgTimeForConcurrentHashMap);
    }

    private Map<String, Integer> addItems(Map<String, Integer> map) {
        for (int i = 0; i < TEST_NO_ITEMS; i++) {
            Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
            map.put(String.valueOf(randNumber), randNumber);
        }
        return map;
    }

    private long performReadTest(final Map<String, Integer> map) throws InterruptedException {
        long startTime = System.nanoTime();
        ExecutorService exectures = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
        for (int j = 0; j < THREAD_POOL_SIZE; j++) {
            exectures.execute(new Runnable() {
                @Override
                public void run() {
                    for (int i = 0; i < TEST_NO_ITEMS; i++) {
                        Integer randNumber = (int) Math.ceil(Math.random() * TEST_NO_ITEMS);
                        map.get(String.valueOf(randNumber));
                    }
                }
            });
        }
        exectures.shutdown();
        exectures.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        long entTime = System.nanoTime();
        return (entTime - startTime) / 1000000L;
    }
}
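
Note: this removed class is the hand-rolled System.nanoTime/ExecutorService harness that the JMH benchmark above replaces. One refinement the JMH version could also adopt is consuming each get() result through a Blackhole so the JIT cannot optimise the read away. The following is a minimal sketch, not part of this commit; the class and field names are illustrative.

package com.baeldung.map.concurrenthashmap;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

@State(Scope.Benchmark)
public class BlackholeReadBenchmark {

    private final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();

    @Setup
    public void setup() {
        // pre-populate keys 0..1000 so the random reads below hit existing entries
        for (int i = 0; i <= 1000; i++) {
            map.put(String.valueOf(i), i);
        }
    }

    @Benchmark
    public void randomRead(Blackhole blackhole) {
        int randNumber = (int) Math.ceil(Math.random() * 1000);
        // consuming the result keeps the JIT from eliminating the get() as dead code
        blackhole.consume(map.get(String.valueOf(randNumber)));
    }
}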