其他分享
首页 > 其他分享 > 集合——LinkedHashSet

集合——LinkedHashSet

作者:互联网

记录LinkedHashSet执行过程

public class test {
    /**
     * Demo: walks through what happens when elements are added to a
     * LinkedHashSet, showing that iteration order equals insertion order.
     */
    public static void main(String[] args) {
        // Element type is Object because the demo mixes Integers, a String
        // and null in one set (the original used a raw type, which loses
        // compile-time type checking).
        LinkedHashSet<Object> set = new LinkedHashSet<>();
        // Add the integers 0..9.
        for (int i = 0; i < 10; i++) {
            set.add(i);
        }
        // LinkedHashSet permits a single null element.
        set.add(null);
        set.add("323");
        // Prints in insertion order: [0, 1, ..., 9, null, 323]
        System.out.println(set);
    }
}
1. LinkedHashSet set = new LinkedHashSet();
public class LinkedHashSet<E>
    extends HashSet<E>
    implements Set<E>, Cloneable, java.io.Serializable 
{
    // The no-arg constructor invoked by `new LinkedHashSet()`.
    public LinkedHashSet() {
        // Delegates to the superclass constructor: LinkedHashSet extends
        // HashSet, so this calls a HashSet constructor.
        // 16: default table capacity; 0.75f: load factor used to compute
        // the resize threshold; true: marker flag that merely selects the
        // linked (LinkedHashMap-backed) overload.
        super(16, .75f, true);
    }
}

2. super(16, .75f, true);

public class HashSet<E>
    extends AbstractSet<E>
    implements Set<E>, Cloneable, java.io.Serializable
{
    // The package-private HashSet constructor reached from LinkedHashSet's
    // no-arg constructor; the boolean `dummy` parameter is unused and only
    // distinguishes this overload.
    HashSet(int initialCapacity, float loadFactor, boolean dummy) {
        // Key insight: LinkedHashSet is actually backed by a LinkedHashMap.
        map = new LinkedHashMap<>(initialCapacity, loadFactor);
    }
}

3. map = new LinkedHashMap<>(initialCapacity, loadFactor);

public class LinkedHashMap<K,V>
    extends HashMap<K,V>
    implements Map<K,V>
{
    // LinkedHashMap in turn delegates to the HashMap constructor.
    public LinkedHashMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
        // false = iterate in insertion order (not access order).
        accessOrder = false;
    }
}

4. super(initialCapacity, loadFactor);

public class HashMap<K,V> extends AbstractMap<K,V>
    implements Map<K,V>, Cloneable, Serializable 
{
    // Validates the arguments and records the load factor and initial
    // threshold; the backing table itself is allocated lazily on first put.
    public HashMap(int initialCapacity, float loadFactor) {
        // Reject a negative capacity (here capacity is 16, so this is false).
        if (initialCapacity < 0)
            throw new IllegalArgumentException("Illegal initial capacity: " +
                                               initialCapacity);
        // Clamp the capacity to the maximum table size (false for 16).
        if (initialCapacity > MAXIMUM_CAPACITY)
            initialCapacity = MAXIMUM_CAPACITY;
        // Reject a non-positive or NaN load factor (false for 0.75f).
        if (loadFactor <= 0 || Float.isNaN(loadFactor))
            throw new IllegalArgumentException("Illegal load factor: " +
                                               loadFactor);
        // Record the load factor (0.75f in this walkthrough).
        this.loadFactor = loadFactor;
        // tableSizeFor rounds the capacity up to the next power of two;
        // this value is reused as the actual capacity at first resize.
        this.threshold = tableSizeFor(initialCapacity);
    }
}

5. set.add(value);

public class HashSet<E>
    extends AbstractSet<E>
    implements Set<E>, Cloneable, java.io.Serializable
{
    // Adds e by inserting it as a key of the backing LinkedHashMap
    // created earlier; PRESENT is a shared dummy value object.
    public boolean add(E e) {
        // put returns null only when the key was not already present, so
        // this is true exactly when the element is newly added.
        return map.put(e, PRESENT)==null;
    }
}

public class HashMap<K,V> extends AbstractMap<K,V>
    implements Map<K,V>, Cloneable, Serializable
{
    // Associates value with key: hash(key) computes the hash used to
    // locate the table bucket, then putVal performs the actual insertion.
    public V put(K key, V value) {
        return putVal(hash(key), key, value, false, true);
    }
}

6. return putVal(hash(key), key, value, false, true);

public class HashMap<K,V> extends AbstractMap<K,V>
    implements Map<K,V>, Cloneable, Serializable 
{
    // Core insertion routine shared by put/putIfAbsent. Returns the old
    // value if the key already existed, or null if a new mapping was added.
    final V putVal(int hash, K key, V value, boolean onlyIfAbsent,
                   boolean evict) {
        Node<K,V>[] tab; Node<K,V> p; int n, i;
        // If the table is still null/empty (true on the very first add)...
        if ((tab = table) == null || (n = tab.length) == 0)
            // ...resize() allocates it (default capacity 16).
            n = (tab = resize()).length;
        // If the bucket selected by the hash is empty...
        if ((p = tab[i = (n - 1) & hash]) == null)
            // ...place a new node holding the element directly in that slot.
            tab[i] = newNode(hash, key, value, null);
        // Otherwise the bucket already contains at least one node.
        else {
            Node<K,V> e; K k;
            // First node has the same hash and an equal (or identical) key:
            // the element is already present.
            if (p.hash == hash &&
                ((k = p.key) == key || (key != null && key.equals(k))))
                e = p;
            // Bucket is stored as a red-black tree: insert via the tree.
            else if (p instanceof TreeNode)
                e = ((TreeNode<K,V>)p).putTreeVal(this, tab, hash, key, value);
            // Bucket is a linked list.
            else {
                // Walk the list (loop exits via break).
                for (int binCount = 0; ; ++binCount) {
                    // Reached the end: append a node with the new element,
                    // treeifying the bin if it grew past the threshold.
                    if ((e = p.next) == null) {
                        p.next = newNode(hash, key, value, null);
                        if (binCount >= TREEIFY_THRESHOLD - 1) // -1 for 1st
                            treeifyBin(tab, hash);
                        break;
                    }
                    // Found an equal (or identical) key in the list: the
                    // element already exists, so nothing new is added.
                    if (e.hash == hash &&
                        ((k = e.key) == key || (key != null && key.equals(k))))
                        break;
                    p = e;
                }
            }
            if (e != null) { // existing mapping for key
                V oldValue = e.value;
                if (!onlyIfAbsent || oldValue == null)
                    e.value = value;
                afterNodeAccess(e);
                return oldValue;
            }
        }
        // Structural modification counter (fail-fast iterators).
        ++modCount;
        // Grow the table once size exceeds threshold (capacity * 0.75f,
        // i.e. 12 for the default capacity of 16).
        if (++size > threshold)
            resize();
        afterNodeInsertion(evict);
        // null signals to HashSet.add that the element was newly added.
        return null;
    }
}

到此为止,程序上好像并没有体现双向链表的事情,接着看

public class LinkedHashMap<K,V>
    extends HashMap<K,V>
    implements Map<K,V>
{
    //当在链表尾部添加一个新的节点时,会调用linkNodeLast方法
    Node<K,V> newNode(int hash, K key, V value, Node<K,V> e) {
        LinkedHashMap.Entry<K,V> p =
            new LinkedHashMap.Entry<K,V>(hash, key, value, e);
        linkNodeLast(p);
        return p;
    }
<span class="hljs-function"><span class="hljs-keyword">private <span class="hljs-keyword">void <span class="hljs-title">linkNodeLast<span class="hljs-params">(LinkedHashMap.Entry&lt;K,V&gt; p) {
    <span class="hljs-comment">//获取当前链表最后一个Entry节点,Entry是LinkedHashMap的内部类,继承了Node类
    LinkedHashMap.Entry&lt;K,V&gt; last = tail;
    <span class="hljs-comment">//让tail指向新添加的Entry节点
    tail = p;
    <span class="hljs-comment">//如果尾节点为null
    <span class="hljs-keyword">if (last == <span class="hljs-keyword">null)
        <span class="hljs-comment">//让头节点指向新添加的Entry节点
        head = p;
    <span class="hljs-keyword">else {
        <span class="hljs-comment">//如果尾节点不为null,则让将新的Entry节点变为尾节点
        <span class="hljs-comment">//新节点的前指针指向上一节点
        p.before = last;
        <span class="hljs-comment">//old尾节点的后指针指向新节点
        last.after = p;
    }
}

}


END

标签:hash,value,节点,key,集合,LinkedHashSet,null,public
来源: https://www.cnblogs.com/isclay/p/15311890.html