HiSEN

How to Use HashMap in a Thread-Safe Way

Why HashMap is not thread-safe

  1. HashMap is backed by an array of Entry nodes and resolves hash collisions by chaining (each bucket holds a linked list); two threads that modify the same bucket at the same time can overwrite or drop each other's nodes (the sketch below shows entries being lost in practice).
  2. The put method is not synchronized.
  3. The resize (rehash) method is not synchronized either; in JDK 7 a concurrent resize can even turn a bucket's chain into a cycle, after which get() loops forever.

Reference: https://www.cnblogs.com/qiumingcheng/p/5259892.html
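
To make the first point concrete, here is a minimal sketch (class name, thread count, and key ranges are illustrative, not from the original post): several threads put disjoint keys into a plain HashMap, and the final size usually comes out smaller than the 500,000 entries that were written.

```java
import java.util.HashMap;
import java.util.Map;

public class UnsafeHashMapDemo {

  public static void main(String[] args) throws InterruptedException {
    Map<Integer, Integer> map = new HashMap<>();
    Thread[] threads = new Thread[5];
    for (int t = 0; t < threads.length; t++) {
      final int offset = t * 100_000;
      threads[t] = new Thread(() -> {
        // Each thread writes its own disjoint key range, so no key is written twice
        for (int i = 0; i < 100_000; i++) {
          map.put(offset + i, i);
        }
      });
      threads[t].start();
    }
    for (Thread thread : threads) {
      thread.join();
    }
    // Expected 500000; concurrent puts and resizes typically lose entries
    System.out.println("size = " + map.size());
  }
}
```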

How to use it in a thread-safe way

  1. Hashtable
  2. ConcurrentHashMap
  3. SynchronizedMap
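
Note that all three only make individual calls thread-safe; a check-then-act sequence such as "get, then put if absent" is still a race unless you lock around it or use one of ConcurrentHashMap's atomic compound operations. A small sketch, assuming JDK 8+:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class CompoundOperationDemo {

  public static void main(String[] args) {
    Map<String, Integer> counter = new ConcurrentHashMap<>();
    // Atomic "insert if missing": no lost update even if two threads race here
    counter.putIfAbsent("hisen", 0);
    // Atomic read-modify-write of a single key
    counter.merge("hisen", 1, Integer::sum);
    System.out.println(counter.get("hisen")); // prints 1
  }
}
```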

#### Example

```java
// Hashtable
Map<String, String> hashtable = new Hashtable<>();
// synchronizedMap
Map<String, String> synchronizedHashMap = Collections.synchronizedMap(new HashMap<String, String>());
// ConcurrentHashMap
Map<String, String> concurrentHashMap = new ConcurrentHashMap<>();
```
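
One caveat with Collections.synchronizedMap: only individual calls are synchronized. As the JDK javadoc notes, you must synchronize on the returned map yourself while iterating, otherwise the iteration is not atomic and may throw ConcurrentModificationException. A short sketch:

```java
Map<String, String> synchronizedHashMap = Collections.synchronizedMap(new HashMap<String, String>());
synchronizedHashMap.put("key", "value");
// Hold the wrapper's own monitor for the whole iteration; each iterator call
// on its own is otherwise an unguarded step that other threads can interleave with.
synchronized (synchronizedHashMap) {
  for (Map.Entry<String, String> entry : synchronizedHashMap.entrySet()) {
    System.out.println(entry.getKey() + " = " + entry.getValue());
  }
}
```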

#### Performance comparison

An ExecutorService runs 5 threads concurrently, and each thread adds/gets 500K entries.

The numbers show that ConcurrentHashMap is the most efficient, since it locks at a much finer granularity (segments in JDK 7, individual bins in JDK 8) instead of serializing every call on a single map-wide lock as Hashtable and the synchronized wrapper do.

The test code is as follows:

```java
package com.hisen.collection.map;

import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * @author : yhx
 * @date : 2017/11/14 23:36
 * @descriptor : Compare different Map implementations - an ExecutorService runs 5 threads
 *               concurrently; each thread adds/gets 500K entries.
 */
public class CrunchifyConcurrentHashMapVsSynchronizedMap {

  private static final int THREAD_POOL_SIZE = 5;
  public static Map<String, Integer> crunchifyHashTableObject = null;
  public static Map<String, Integer> crunchifySynchronizedMapObject = null;
  public static Map<String, Integer> crunchifyConcurrentHashMapObject = null;

  public static void main(String[] args) throws InterruptedException {
    // Test with Hashtable Object
    crunchifyHashTableObject = new Hashtable<>();
    crunchifyPerformTest(crunchifyHashTableObject);

    // Test with synchronizedMap Object
    crunchifySynchronizedMapObject = Collections.synchronizedMap(new HashMap<String, Integer>());
    crunchifyPerformTest(crunchifySynchronizedMapObject);

    // Test with ConcurrentHashMap Object
    crunchifyConcurrentHashMapObject = new ConcurrentHashMap<>();
    crunchifyPerformTest(crunchifyConcurrentHashMapObject);

    /*
     * Test results:
     *
     * Test start for:class java.util.Hashtable
     * 2500K entries added/retrieved in 2953 ms
     * 2500K entries added/retrieved in 4649 ms
     * 2500K entries added/retrieved in 2736 ms
     * 2500K entries added/retrieved in 2628 ms
     * 2500K entries added/retrieved in 2621 ms
     * For class java.util.Hashtable the average time is 3117 ms
     *
     * Test start for:class java.util.Collections$SynchronizedMap
     * 2500K entries added/retrieved in 3036 ms
     * 2500K entries added/retrieved in 2881 ms
     * 2500K entries added/retrieved in 2692 ms
     * 2500K entries added/retrieved in 3020 ms
     * 2500K entries added/retrieved in 2806 ms
     * For class java.util.Collections$SynchronizedMap the average time is 2887 ms
     *
     * Test start for:class java.util.concurrent.ConcurrentHashMap
     * 2500K entries added/retrieved in 4378 ms
     * 2500K entries added/retrieved in 1126 ms
     * 2500K entries added/retrieved in 1008 ms
     * 2500K entries added/retrieved in 935 ms
     * 2500K entries added/retrieved in 1069 ms
     * For class java.util.concurrent.ConcurrentHashMap the average time is 1703 ms
     */
  }

  private static void crunchifyPerformTest(Map<String, Integer> crunchifyThreads)
      throws InterruptedException {
    System.out.println("Test start for:" + crunchifyThreads.getClass());
    long avgTime = 0;
    for (int i = 0; i < 5; i++) {
      long startTime = System.nanoTime();

      ExecutorService executorService = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
      for (int j = 0; j < THREAD_POOL_SIZE; j++) {
        executorService.execute(new Runnable() {
          @Override
          public void run() {
            for (int k = 0; k < 500000; k++) {
              Integer crunchifyRandomNumber = (int) Math.ceil(Math.random() * 550000);
              // Retrieve value. We are not using it anywhere
              Integer crunchifyValue = crunchifyThreads.get(String.valueOf(crunchifyRandomNumber));
              // Put value
              crunchifyThreads.put(String.valueOf(crunchifyRandomNumber), crunchifyRandomNumber);
            }
          }
        });
      }
      // Make sure executor stops
      executorService.shutdown();
      // Blocks until all tasks have completed execution after a shutdown request
      executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
      long endTime = System.nanoTime();
      long totalTime = (endTime - startTime) / 1000000L;
      avgTime += totalTime;
      System.out.println("2500K entries added/retrieved in " + totalTime + " ms");
    }
    System.out.println("For " + crunchifyThreads.getClass() + " the average time is " + avgTime / 5 + " ms\n");
  }
}
```