ConcurrentHashMap Source Code Analysis, Part 11 (Final): merge(), treeifyBin(), untreeify(), and Thread Safety

1. size()

  • Returns the number of elements currently in the map, capped at Integer.MAX_VALUE; the real counting work is done by sumCount() (a simplified sketch of that scheme follows the source)
public int size() {
    long n = sumCount();
    return ((n < 0L) ? 0 :
            (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
            (int)n);
}
/* The method that actually sums up the element count */
final long sumCount() {
	/* element count = baseCount + the sum of every CounterCell's value */
    CounterCell[] as = counterCells; CounterCell a;
    long sum = baseCount;
    if (as != null) {
        for (int i = 0; i < as.length; ++i) {
            if ((a = as[i]) != null)
                sum += a.value;
        }
    }
    return sum;
}
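
  • To build intuition for how baseCount and counterCells cooperate, here is a minimal sketch of a striped counter. It is not the JDK implementation: the class name StripedCounter, the fixed stripe count of 16, and the random cell selection are all simplifications of the real fullAddCount() logic.
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicLongArray;

public class StripedCounter {
    private final AtomicLong base = new AtomicLong();               // plays the role of baseCount
    private final AtomicLongArray cells = new AtomicLongArray(16);  // plays the role of counterCells (fixed size here)

    void add(long x) {
        // Fast path: a single CAS on the shared base counter.
        long b = base.get();
        if (!base.compareAndSet(b, b + x)) {
            // Contended: spread the update over a randomly chosen cell,
            // roughly what fullAddCount() does with CounterCells.
            int idx = ThreadLocalRandom.current().nextInt(cells.length());
            cells.addAndGet(idx, x);
        }
    }

    // Mirrors sumCount(): base plus every cell, read without locking,
    // so while writers are active the result is only an estimate.
    long sum() {
        long s = base.get();
        for (int i = 0; i < cells.length(); i++)
            s += cells.get(i);
        return s;
    }

    public static void main(String[] args) throws InterruptedException {
        StripedCounter c = new StripedCounter();
        Thread[] workers = new Thread[4];
        for (int i = 0; i < workers.length; i++) {
            workers[i] = new Thread(() -> {
                for (int j = 0; j < 100_000; j++) c.add(1);
            });
            workers[i].start();
        }
        for (Thread t : workers) t.join();
        System.out.println(c.sum());    // 400000 once all writers have finished
    }
}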

2. isEmpty()

  • Checks whether the map is empty; it simply asks sumCount() for the current estimate
public boolean isEmpty() {
    return sumCount() <= 0L;
}

3. containsKey(Object key)

  • Checks whether the map contains the given key; because ConcurrentHashMap never stores null values, get(key) != null is a reliable test (see the usage note after the source)
public boolean containsKey(Object key) {
    return get(key) != null;
}
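
  • A quick usage note (the class name ContainsKeyDemo is ours): since ConcurrentHashMap rejects null keys and null values, get(key) == null can only mean "key absent", which is why containsKey can be implemented this way.
import java.util.concurrent.ConcurrentHashMap;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, String> map = new ConcurrentHashMap<>();
        map.put("a", "1");

        System.out.println(map.containsKey("a"));   // true
        System.out.println(map.containsKey("b"));   // false
        // map.containsKey(null) would throw NullPointerException:
        // spread(key.hashCode()) dereferences the key, and nulls are disallowed by design.
    }
}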

4. containsValue(Object value)

  • Checks whether any mapping has the given value; this requires a full traversal of the table (see the sketch after the source)
public boolean containsValue(Object value) {
	/* reject null values */
    if (value == null)
        throw new NullPointerException();
    Node<K,V>[] t;
    /* table is non-null: traverse every bin and compare each node's value */
    if ((t = table) != null) {
        Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
        for (Node<K,V> p; (p = it.advance()) != null; ) {
            V v;
            if ((v = p.val) == value || (v != null && value.equals(v)))
                return true;
        }
    }
    return false;
}
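
  • The comparison in the loop is (v == value || value.equals(v)), so logically equal values match even when they are distinct objects; also note that the full-table traversal makes this O(n). A small sketch (ContainsValueDemo is our own name):
import java.util.concurrent.ConcurrentHashMap;

public class ContainsValueDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
        map.put(1, new String("hello"));                  // force a distinct String instance

        // Reference equality fails, but equals() succeeds, so this prints true.
        System.out.println(map.containsValue("hello"));   // true
        System.out.println(map.containsValue("missing")); // false
        // map.containsValue(null) throws NullPointerException (nulls are rejected).
    }
}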

5. contains(Object value)

  • Identical in behavior to containsValue(value); it exists only for compatibility with the legacy Hashtable API
public boolean contains(Object value) {
    return containsValue(value);
}

6. merge()

  • Behavior depends on the node p found for key and on the val returned by remappingFunction(oldValue, value) (a usage sketch follows the source):
    • p is null (key absent): insert a new Node(key, value); remappingFunction is not invoked
    • p is not null, val is null: remove node p
    • p is not null, val is not null: update p's value to val
public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
	/* reject null key, value, or function */
    if (key == null || value == null || remappingFunction == null)
        throw new NullPointerException();
    int h = spread(key.hashCode()); // compute the spread hash
    V val = null;
    int delta = 0;
    int binCount = 0;
    for (Node<K,V>[] tab = table;;) {
        Node<K,V> f; int n, i, fh;
        /* table not initialized yet: initialize it */
        if (tab == null || (n = tab.length) == 0)
            tab = initTable();
        /* the target bin is empty: CAS in a new Node(key, value) */
        else if ((f = tabAt(tab, i = (n - 1) & h)) == null) {
            if (casTabAt(tab, i, null, new Node<K,V>(h, key, value, null))) {
                delta = 1;
                val = value;
                break;
            }
        }
        /* the table is being resized: help with the transfer first */
        else if ((fh = f.hash) == MOVED)
            tab = helpTransfer(tab, f);
        else {
            /* the bin is non-empty: lock its head node */
            synchronized (f) {
                /* re-check that the head node has not been changed */
                if (tabAt(tab, i) == f) {
                    /* linked-list bin */
                    if (fh >= 0) {
                        binCount = 1;
                        /* walk the list looking for the key */
                        for (Node<K,V> e = f, pred = null;; ++binCount) {
                            K ek;
                            /* found the node for this key */
                            if (e.hash == h &&
                                ((ek = e.key) == key ||
                                 (ek != null && key.equals(ek)))) {
                                /* the val returned by remappingFunction decides between update and removal */
                                val = remappingFunction.apply(e.val, value);
                                if (val != null)
                                    e.val = val;
                                else {
                                    delta = -1;
                                    Node<K,V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            /* key not found: append a new Node(key, value) at the tail */
                            if ((e = e.next) == null) {
                                delta = 1;
                                val = value;
                                pred.next =
                                    new Node<K,V>(h, key, val, null);
                                break;
                            }
                        }
                    }
                    else if (f instanceof TreeBin) {
                        binCount = 2;
                        TreeBin<K,V> t = (TreeBin<K,V>)f;
                        TreeNode<K,V> r = t.root;
                        TreeNode<K,V> p = (r == null) ? null :
                            r.findTreeNode(h, key, null);
                        /* the val returned by remappingFunction decides between insert and removal */
                        val = (p == null) ? value :
                            remappingFunction.apply(p.val, value);
                        if (val != null) {
                            if (p != null)
                                p.val = val;
                            else {
                                /* key not found in the tree: insert a new tree node */
                                delta = 1;
                                t.putTreeVal(h, key, val);
                            }
                        }
                        else if (p != null) {
                            delta = -1;
                            if (t.removeTreeNode(p))
                                setTabAt(tab, i, untreeify(t.first));
                        }
                    }
                }
            }
            /* check whether the bin needs to be treeified */
            if (binCount != 0) {
                if (binCount >= TREEIFY_THRESHOLD)
                    treeifyBin(tab, i);
                break;
            }
        }
    }
    /* delta != 0 means a node was added or removed: adjust the count */
    if (delta != 0)
        addCount((long)delta, binCount);
    return val;
}
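
  • To make the three branches above concrete, here is a small word-count style sketch (MergeDemo is our own example, not from the source):
import java.util.concurrent.ConcurrentHashMap;

public class MergeDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();

        // Key absent: the mapping (a, 1) is inserted and remappingFunction is not invoked.
        counts.merge("a", 1, Integer::sum);                    // {a=1}

        // Key present, function returns non-null: old value 1 + new value 1 = 2.
        counts.merge("a", 1, Integer::sum);                    // {a=2}

        // Key present, function returns null: the mapping is removed.
        counts.merge("a", 1, (oldVal, newVal) -> null);
        System.out.println(counts.containsKey("a"));           // false
    }
}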

7. mappingCount()

  • Returns the element count as a long; the Javadoc recommends it over size(), since a map can in principle hold more mappings than Integer.MAX_VALUE (a tiny comparison follows the source)
public long mappingCount() {
    long n = sumCount();
    return (n < 0L) ? 0L : n; // ignore transient negative values
}
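
  • A tiny comparison of the two counting methods (CountDemo is our own name); for maps that fit in an int the results agree, because both are derived from the same sumCount() estimate:
import java.util.concurrent.ConcurrentHashMap;

public class CountDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, Integer> map = new ConcurrentHashMap<>();
        for (int i = 0; i < 3; i++) map.put(i, i);

        System.out.println(map.size());          // 3, an int capped at Integer.MAX_VALUE
        System.out.println(map.mappingCount());  // 3, the uncapped long view of the same count
    }
}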

8. treeifyBin(Node<K,V>[] tab, int index)

  • tab: the table array holding the data; index: the bin to be treeified (a usage sketch follows the source)
private final void treeifyBin(Node<K,V>[] tab, int index) {
    Node<K,V> b; int n, sc;
    /* nothing to do if the table is null */
    if (tab != null) {
        /* table length is still below MIN_TREEIFY_CAPACITY (64): grow the table instead of treeifying */
        if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
            tryPresize(n << 1);
        /* the bin head must be non-null and b.hash >= 0 (a regular list node), otherwise it is not eligible for treeification */
        else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
            synchronized (b) {
                /* after locking, verify the bin head has not been changed by another thread */
                if (tabAt(tab, index) == b) {
                    TreeNode<K,V> hd = null, tl = null;
                    /* walk the list, wrapping each node in a TreeNode */
                    for (Node<K,V> e = b; e != null; e = e.next) {
                        TreeNode<K,V> p =
                            new TreeNode<K,V>(e.hash, e.key, e.val,
                                              null, null);
                        if ((p.prev = tl) == null)
                            hd = p;
                        else
                            tl.next = p;
                        tl = p;
                    }
                    /* replace the bin head with a TreeBin built from the TreeNode list */
                    setTabAt(tab, index, new TreeBin<K,V>(hd));
                }
            }
        }
    }
}
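
  • A sketch that exercises the TreeBin branch of treeifyBin(); the CollidingKey class and the capacity of 128 are our own choices for illustration. With the default table length of 16, exceeding TREEIFY_THRESHOLD (8) in a bin only triggers tryPresize(); the bin is actually converted to a TreeBin once the table length has reached MIN_TREEIFY_CAPACITY (64), so the map below is pre-sized.
import java.util.concurrent.ConcurrentHashMap;

public class TreeifyDemo {
    // Every instance hashes to the same bin; Comparable gives the red-black tree
    // a total order to fall back on when hash codes are equal.
    static final class CollidingKey implements Comparable<CollidingKey> {
        final int id;
        CollidingKey(int id) { this.id = id; }
        @Override public int hashCode() { return 42; }   // constant hash: everyone collides
        @Override public boolean equals(Object o) {
            return o instanceof CollidingKey && ((CollidingKey) o).id == id;
        }
        @Override public int compareTo(CollidingKey other) { return Integer.compare(id, other.id); }
    }

    public static void main(String[] args) {
        // Pre-sized so the table is already >= MIN_TREEIFY_CAPACITY and the bin can treeify.
        ConcurrentHashMap<CollidingKey, Integer> map = new ConcurrentHashMap<>(128);
        for (int i = 0; i < 20; i++)
            map.put(new CollidingKey(i), i);   // beyond the 8th collision the bin becomes a TreeBin

        // From the outside nothing changes; lookups now go through TreeBin / findTreeNode().
        System.out.println(map.get(new CollidingKey(7)));  // 7
        System.out.println(map.size());                    // 20
    }
}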

9. Node<K,V> untreeify(Node<K,V> b)

  • Converts a tree bin back into a plain linked list; called when a tree bin has shrunk too far, e.g. after removals or when a resize splits it below UNTREEIFY_THRESHOLD
/* b: the first node of the bin */
static <K,V> Node<K,V> untreeify(Node<K,V> b) {
    Node<K,V> hd = null, tl = null;
    /* the TreeBin also keeps its nodes linked through next, so a simple next traversal is all that is needed */
    for (Node<K,V> q = b; q != null; q = q.next) {
        /* create a plain Node and link it onto the new list */
        Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val, null);
        /* tail is null: the new list is still empty, so this node becomes its head */
        if (tl == null)
            hd = p;
        else
            tl.next = p;
        tl = p;
    }
    return hd;
}

10. Summary

import java.util.concurrent.ConcurrentHashMap;

public class ConcurrentHashMapTest12 {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, Integer> concurrentHashMap = new ConcurrentHashMap<>();
        for(int i = 1; i <= 10; i++) {
            concurrentHashMap.put(i, i);
        }
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("====================================");
        System.out.println("concurrentHashMap.size(): " + concurrentHashMap.size());
        System.out.println("concurrentHashMap.mappingCount(): " + concurrentHashMap.mappingCount());
        System.out.println("concurrentHashMap.isEmpty(): " + concurrentHashMap.isEmpty());
        System.out.println("concurrentHashMap.containsKey(10): " + concurrentHashMap.containsKey(10));
        System.out.println("concurrentHashMap.containsKey(100): " + concurrentHashMap.containsKey(100));
        System.out.println("concurrentHashMap.containsValue(10): " + concurrentHashMap.containsValue(10));
        System.out.println("concurrentHashMap.containsValue(100): " + concurrentHashMap.containsValue(100));
        System.out.println("concurrentHashMap.contains(10): " + concurrentHashMap.contains(10));
        System.out.println("concurrentHashMap.contains(100): " + concurrentHashMap.contains(100));
        System.out.println("====================================");
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("concurrentHashMap.merge(1, 11, (key, value) -> value):" + concurrentHashMap.merge(1, 11, (key, value) -> value));
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("concurrentHashMap.merge(2, 12, (key, value) -> value):" + concurrentHashMap.merge(2, 12, (key, value) -> null));
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("concurrentHashMap.merge(13, 13, (key, value) -> value):" + concurrentHashMap.merge(13, 13, (key, value) -> value));
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("concurrentHashMap.merge(14, 14, (key, value) -> value):" + concurrentHashMap.merge(14, 14, (key, value) -> null));
        System.out.println("now concurrentHashMap: " + concurrentHashMap);
        System.out.println("====================================");
    }
}

[Screenshot: program output]

11. Is ConcurrentHashMap thread safe?

  • Without question it is thread safe; otherwise all of the CAS operations, per-bin synchronized blocks, and striped counting above would be wasted effort. The test below writes 100 distinct keys from 10 threads and always ends up with exactly 100 entries.
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

public class ThreadSafeTest {
    public static void main(String[] args) throws InterruptedException {
        ConcurrentHashMap<Integer, Integer> hashMap = new ConcurrentHashMap<>();
        CountDownLatch countDownLatch = new CountDownLatch(10);
        ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(50, 100, 0L, TimeUnit.SECONDS,
                new LinkedBlockingDeque<>());

        for(int i = 1; i <= 10; i++) {
            int finalI = i;
            threadPoolExecutor.execute(() -> {
                for(int j = 1; j <= 10; j++) {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    hashMap.put(finalI * 10 + j, j);
                }
                countDownLatch.countDown();
            });
        }
        countDownLatch.await();
        threadPoolExecutor.shutdown(); // allow the JVM to exit once all tasks have finished
        System.out.println("hashMap.size(): " + hashMap.size());
        AtomicInteger size = new AtomicInteger();
        hashMap.forEach((key, value) -> {
            System.out.print(key + " = " + value + "; \t");
            if(size.incrementAndGet() >= 10) {
                size.set(0);
                System.out.println();
            }
        });
    }
}

[Screenshot: program output]
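
  • For contrast, the same style of workload against a plain HashMap is not safe: unsynchronized concurrent puts can lose table updates (and on very old JDKs even corrupt the bucket links), so the reported size is often below 100. A minimal sketch; the exact outcome varies from run to run:
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

public class NotThreadSafeTest {
    public static void main(String[] args) throws InterruptedException {
        Map<Integer, Integer> hashMap = new HashMap<>();   // no synchronization at all
        CountDownLatch countDownLatch = new CountDownLatch(10);

        for (int i = 1; i <= 10; i++) {
            int finalI = i;
            new Thread(() -> {
                for (int j = 1; j <= 10; j++) {
                    hashMap.put(finalI * 10 + j, j);        // racing, unsynchronized writes
                }
                countDownLatch.countDown();
            }).start();
        }
        countDownLatch.await();
        // May print less than 100 because racing puts can overwrite each other's
        // table updates; the ConcurrentHashMap version above always prints 100.
        System.out.println("hashMap.size(): " + hashMap.size());
    }
}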
