public class HashMap<K,V> extends AbstractMap<K,V> implements Map<K,V>, Cloneable, Serializable
HashMap是我们最常用的存储数据的对象,相信大部分同学都知道HashMap的底层是数组+链表实现的,默认的初始容量为16,而扩容因子为0.75,这两个值在创建对象的时候是可以通过参数显式指定的,那接下来我们就看看HashMap究竟是怎么实现的;
HashMap<String, Object> map = new HashMap<>();
以上是没有参数的HashMap对象,创建的过程如下
/**
 * No-arg constructor. Only the default load factor (0.75) is recorded;
 * the backing array is NOT created here — table stays null until the
 * first put() triggers resize().
 */
public HashMap() {
    this.loadFactor = DEFAULT_LOAD_FACTOR; // all other fields defaulted; load factor defaults to 0.75
}
到此就结束了,所以HashMap初始化时数组长度是多少呢?答案是 0 —— 此时内部的 table 数组还是 null,要等到第一次 put 的时候才会真正创建数组!
HashMap里的数组不同于ArrayList里面的数组,ArrayList里面的数组存储的是Object,而HashMap的数组存储的是Node;
Node是什么鬼呢?我们来看看Node,该Node用于HashMap的链表部分,红黑树另有别的结构TreeNode
static class Node implements Map.Entry { // Entry是Map内部的一个接口 final int hash; // hash值 final K key; // 键 V value; // 值 Node next; // 下一个节点 Node(int hash, K key, V value, Node next) { this.hash = hash; this.key = key; this.value = value; this.next = next; } public final K getKey() { return key; } public final V getValue() { return value; } public final String toString() { return key + "=" + value; } public final int hashCode() { // 获取Node的Hash值时,其实是key和value的hash异或 return Objects.hashCode(key) ^ Objects.hashCode(value); } public final V setValue(V newValue) { V oldValue = value; value = newValue; return oldValue; } public final boolean equals(Object o) { if (o == this) return true; if (o instanceof Map.Entry) { Map.Entry<?,?> e = (Map.Entry<?,?>)o; if (Objects.equals(key, e.getKey()) && Objects.equals(value, e.getValue())) return true; } return false; } }
接下来看看最精彩的put数据过程,第一次put数据分析
/**
 * Associates the specified value with the specified key in this map.
 * Note that put() does not insert directly — it first spreads the
 * key's hashCode through hash() and then delegates to putVal().
 *
 * @return the previous value for this key, or null if there was none
 */
public V put(K key, V value) {
    return putVal(hash(key), key, value, false, true);
}
static final int hash(Object key) { // 使用key的hashCode与自身的高16位去异或,目的是为了分散hash值 int h; return (key == null) ? 0 : (h = key.hashCode()) ^ (h >>> 16); // ^ 取异或 h >>> 16 右移16位,高位补0 }
/**
 * Implements Map.put and related methods.
 *
 * @param hash          spread hash of the key (see hash())
 * @param key           the key
 * @param value         the value to put
 * @param onlyIfAbsent  if true, don't overwrite an existing value
 * @param evict         if false, the table is in creation mode
 * @return the previous value, or null if none
 */
final V putVal(int hash, K key, V value, boolean onlyIfAbsent, boolean evict) {
    Node[] tab; Node p; int n, i;
    // Lazy allocation: on the very first put, table is still null, so
    // this first "resize" actually creates the default 16-slot array.
    if ((tab = table) == null || (n = tab.length) == 0)
        n = (tab = resize()).length;
    // (n - 1) & hash maps the hash into 0..n-1 (n is a power of two);
    // that number is the entry's slot in the array.
    if ((p = tab[i = (n - 1) & hash]) == null)
        tab[i] = newNode(hash, key, value, null); // empty bucket: store directly
    else {
        Node e; K k;
        // Bucket head already holds this key -> remember it for replacement.
        if (p.hash == hash &&
            ((k = p.key) == key || (key != null && key.equals(k))))
            e = p;
        else if (p instanceof TreeNode)
            // Bucket has been treeified: insert into the red-black tree.
            e = ((TreeNode)p).putTreeVal(this, tab, hash, key, value);
        else {
            // Walk the linked list looking for the key.
            for (int binCount = 0; ; ++binCount) {
                if ((e = p.next) == null) {
                    // Key absent: append a new node at the tail...
                    p.next = newNode(hash, key, value, null);
                    // ...then treeify if the chain is long enough (8).
                    if (binCount >= TREEIFY_THRESHOLD - 1) // -1 for 1st
                        treeifyBin(tab, hash);
                    break;
                }
                if (e.hash == hash &&
                    ((k = e.key) == key || (key != null && key.equals(k))))
                    break;
                p = e;
            }
        }
        if (e != null) { // existing mapping for key
            V oldValue = e.value;
            if (!onlyIfAbsent || oldValue == null)
                e.value = value;
            afterNodeAccess(e); // LinkedHashMap hook; no-op in HashMap
            return oldValue;
        }
    }
    ++modCount;
    // Grow the table once size exceeds threshold (capacity * loadFactor).
    if (++size > threshold)
        resize();
    afterNodeInsertion(evict); // LinkedHashMap hook; no-op in HashMap
    return null;
}
我们接着看第一次的扩容方法
/**
 * Initializes or doubles the table size. Because capacities are powers
 * of two, during a doubling each entry either stays at the same index
 * or moves exactly oldCap slots higher in the new table.
 *
 * @return the (new) table
 */
final Node[] resize() {
    Node[] oldTab = table; // null on the very first put
    int oldCap = (oldTab == null) ? 0 : oldTab.length; // 0 the first time
    int oldThr = threshold;
    int newCap, newThr = 0;
    if (oldCap > 0) {
        if (oldCap >= MAXIMUM_CAPACITY) {
            // Already at the 1<<30 cap: stop growing, only raise threshold.
            threshold = Integer.MAX_VALUE;
            return oldTab;
        }
        else if ((newCap = oldCap << 1) < MAXIMUM_CAPACITY &&
                 oldCap >= DEFAULT_INITIAL_CAPACITY)
            newThr = oldThr << 1; // double threshold
    }
    else if (oldThr > 0) // initial capacity was placed in threshold
        newCap = oldThr;
    else { // zero initial threshold signifies using defaults
        newCap = DEFAULT_INITIAL_CAPACITY; // default initial capacity is 16 (DEFAULT_INITIAL_CAPACITY = 1 << 4)
        newThr = (int)(DEFAULT_LOAD_FACTOR * DEFAULT_INITIAL_CAPACITY); // 0.75 * 16 = 12 entries before the next resize
    }
    if (newThr == 0) {
        float ft = (float)newCap * loadFactor;
        newThr = (newCap < MAXIMUM_CAPACITY && ft < (float)MAXIMUM_CAPACITY ?
                  (int)ft : Integer.MAX_VALUE);
    }
    threshold = newThr; // resize trigger, e.g. 16 * 0.75 = 12
    @SuppressWarnings({"rawtypes","unchecked"})
    Node[] newTab = (Node[])new Node[newCap];
    table = newTab; // install the new array
    if (oldTab != null) {
        // Rehash: move every existing entry into the new table.
        for (int j = 0; j < oldCap; ++j) {
            Node e;
            if ((e = oldTab[j]) != null) {
                oldTab[j] = null; // help GC
                if (e.next == null)
                    // Single node: recompute its slot directly.
                    newTab[e.hash & (newCap - 1)] = e;
                else if (e instanceof TreeNode)
                    ((TreeNode)e).split(this, newTab, j, oldCap);
                else { // preserve order
                    // Split the chain into a "low" list (stays at index j)
                    // and a "high" list (moves to j + oldCap), decided by
                    // the single new bit (e.hash & oldCap).
                    Node loHead = null, loTail = null;
                    Node hiHead = null, hiTail = null;
                    Node next;
                    do {
                        next = e.next;
                        if ((e.hash & oldCap) == 0) {
                            if (loTail == null)
                                loHead = e;
                            else
                                loTail.next = e;
                            loTail = e;
                        }
                        else {
                            if (hiTail == null)
                                hiHead = e;
                            else
                                hiTail.next = e;
                            hiTail = e;
                        }
                    } while ((e = next) != null);
                    if (loTail != null) {
                        loTail.next = null;
                        newTab[j] = loHead;
                    }
                    if (hiTail != null) {
                        hiTail.next = null;
                        newTab[j + oldCap] = hiHead;
                    }
                }
            }
        }
    }
    return newTab;
}
所以,HashMap的默认容量(16)是在第一次put值时才真正分配的,构造函数里只记录了扩容因子;
HashMap数组扩容的条件有两个:第一是数据的个数超过了门槛值(threshold);第二是链表转红黑树的时候,如果数组的长度小于64,那么会优先选择扩容而不是树化。