root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java

Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.228 by jsr166, Tue Jun 18 18:39:14 2013 UTC vs.
Revision 1.313 by jsr166, Mon Oct 1 00:10:53 2018 UTC

# Line 10 | Line 10 | import java.io.ObjectStreamField;
10   import java.io.Serializable;
11   import java.lang.reflect.ParameterizedType;
12   import java.lang.reflect.Type;
13 + import java.util.AbstractMap;
14   import java.util.Arrays;
15   import java.util.Collection;
15 import java.util.Comparator;
16 import java.util.ConcurrentModificationException;
16   import java.util.Enumeration;
17   import java.util.HashMap;
18   import java.util.Hashtable;
# Line 22 | Line 21 | import java.util.Map;
21   import java.util.NoSuchElementException;
22   import java.util.Set;
23   import java.util.Spliterator;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.ForkJoinPool;
24   import java.util.concurrent.atomic.AtomicReference;
25   import java.util.concurrent.locks.LockSupport;
26   import java.util.concurrent.locks.ReentrantLock;
27   import java.util.function.BiConsumer;
28   import java.util.function.BiFunction;
32 import java.util.function.BinaryOperator;
29   import java.util.function.Consumer;
30   import java.util.function.DoubleBinaryOperator;
31   import java.util.function.Function;
32   import java.util.function.IntBinaryOperator;
33   import java.util.function.LongBinaryOperator;
34 + import java.util.function.Predicate;
35   import java.util.function.ToDoubleBiFunction;
36   import java.util.function.ToDoubleFunction;
37   import java.util.function.ToIntBiFunction;
# Line 42 | Line 39 | import java.util.function.ToIntFunction;
39   import java.util.function.ToLongBiFunction;
40   import java.util.function.ToLongFunction;
41   import java.util.stream.Stream;
42 + import jdk.internal.misc.Unsafe;
43  
44   /**
45   * A hash table supporting full concurrency of retrievals and
# Line 64 | Line 62 | import java.util.stream.Stream;
62   * that key reporting the updated value.)  For aggregate operations
63   * such as {@code putAll} and {@code clear}, concurrent retrievals may
64   * reflect insertion or removal of only some entries.  Similarly,
65 < * Iterators and Enumerations return elements reflecting the state of
66 < * the hash table at some point at or since the creation of the
65 > * Iterators, Spliterators and Enumerations return elements reflecting the
66 > * state of the hash table at some point at or since the creation of the
67   * iterator/enumeration.  They do <em>not</em> throw {@link
68 < * ConcurrentModificationException}.  However, iterators are designed
69 < * to be used by only one thread at a time.  Bear in mind that the
70 < * results of aggregate status methods including {@code size}, {@code
71 < * isEmpty}, and {@code containsValue} are typically useful only when
72 < * a map is not undergoing concurrent updates in other threads.
68 > * java.util.ConcurrentModificationException ConcurrentModificationException}.
69 > * However, iterators are designed to be used by only one thread at a time.
70 > * Bear in mind that the results of aggregate status methods including
71 > * {@code size}, {@code isEmpty}, and {@code containsValue} are typically
72 > * useful only when a map is not undergoing concurrent updates in other threads.
73   * Otherwise the results of these methods reflect transient states
74   * that may be adequate for monitoring or estimation purposes, but not
75   * for program control.
# Line 104 | Line 102 | import java.util.stream.Stream;
102   * mapped values are (perhaps transiently) not used or all take the
103   * same mapping value.
104   *
105 < * <p>A ConcurrentHashMap can be used as scalable frequency map (a
105 > * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
106   * form of histogram or multiset) by using {@link
107   * java.util.concurrent.atomic.LongAdder} values and initializing via
108   * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
109   * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
110 < * {@code freqs.computeIfAbsent(k -> new LongAdder()).increment();}
110 > * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
111   *
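A minimal, self-contained sketch of the frequency-map idiom described above (the class name FreqDemo and the sample words are illustrative, not part of this file):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.LongAdder;

class FreqDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,LongAdder> freqs = new ConcurrentHashMap<>();
        for (String word : new String[] { "a", "b", "a" })
            freqs.computeIfAbsent(word, k -> new LongAdder()).increment();
        System.out.println(freqs.get("a").sum()); // prints 2
    }
}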
112   * <p>This class and its views and iterators implement all of the
113   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
# Line 124 | Line 122 | import java.util.stream.Stream;
122   * being concurrently updated by other threads; for example, when
123   * computing a snapshot summary of the values in a shared registry.
124   * There are three kinds of operation, each with four forms, accepting
125 < * functions with Keys, Values, Entries, and (Key, Value) arguments
126 < * and/or return values. Because the elements of a ConcurrentHashMap
127 < * are not ordered in any particular way, and may be processed in
128 < * different orders in different parallel executions, the correctness
129 < * of supplied functions should not depend on any ordering, or on any
130 < * other objects or values that may transiently change while
131 < * computation is in progress; and except for forEach actions, should
132 < * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
133 < * objects do not support method {@code setValue}.
125 > * functions with keys, values, entries, and (key, value) pairs as
126 > * arguments and/or return values. Because the elements of a
127 > * ConcurrentHashMap are not ordered in any particular way, and may be
128 > * processed in different orders in different parallel executions, the
129 > * correctness of supplied functions should not depend on any
130 > * ordering, or on any other objects or values that may transiently
131 > * change while computation is in progress; and except for forEach
132 > * actions, should ideally be side-effect-free. Bulk operations on
133 > * {@link Map.Entry} objects do not support method {@code setValue}.
134   *
135   * <ul>
136 < * <li> forEach: Perform a given action on each element.
136 > * <li>forEach: Performs a given action on each element.
137   * A variant form applies a given transformation on each element
138 < * before performing the action.</li>
138 > * before performing the action.
139   *
140 < * <li> search: Return the first available non-null result of
140 > * <li>search: Returns the first available non-null result of
141   * applying a given function on each element; skipping further
142 < * search when a result is found.</li>
142 > * search when a result is found.
143   *
144 < * <li> reduce: Accumulate each element.  The supplied reduction
144 > * <li>reduce: Accumulates each element.  The supplied reduction
145   * function cannot rely on ordering (more formally, it should be
146   * both associative and commutative).  There are five variants:
147   *
148   * <ul>
149   *
150 < * <li> Plain reductions. (There is not a form of this method for
150 > * <li>Plain reductions. (There is not a form of this method for
151   * (key, value) function arguments since there is no corresponding
152 < * return type.)</li>
152 > * return type.)
153   *
154 < * <li> Mapped reductions that accumulate the results of a given
155 < * function applied to each element.</li>
154 > * <li>Mapped reductions that accumulate the results of a given
155 > * function applied to each element.
156   *
157 < * <li> Reductions to scalar doubles, longs, and ints, using a
158 < * given basis value.</li>
157 > * <li>Reductions to scalar doubles, longs, and ints, using a
158 > * given basis value.
159   *
160   * </ul>
163 * </li>
161   * </ul>
162   *
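A hedged caller-side sketch of the three bulk-operation kinds listed above, using the public forEach/search/reduce overloads (the map contents and the parallelism threshold of 1 are arbitrary):

import java.util.concurrent.ConcurrentHashMap;

class BulkOpsDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Long> map = new ConcurrentHashMap<>();
        map.put("a", 1L);
        map.put("b", 2L);
        // forEach, with a transformer applied to each entry before the action
        map.forEach(1L, (k, v) -> k + "=" + v, System.out::println);
        // search: first key whose value exceeds 1, or null if none is found
        String big = map.search(1L, (k, v) -> v > 1L ? k : null);
        // reduce to a scalar long, using a basis of 0
        long sum = map.reduceValuesToLong(1L, Long::longValue, 0L, Long::sum);
        System.out.println(big + " " + sum);
    }
}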
163   * <p>These bulk operations accept a {@code parallelismThreshold}
# Line 227 | Line 224 | import java.util.stream.Stream;
224   * <p>All arguments to all task methods must be non-null.
225   *
226   * <p>This class is a member of the
227 < * <a href="{@docRoot}/../technotes/guides/collections/index.html">
227 > * <a href="{@docRoot}/java.base/java/util/package-summary.html#CollectionsFramework">
228   * Java Collections Framework</a>.
229   *
230   * @since 1.5
# Line 235 | Line 232 | import java.util.stream.Stream;
232   * @param <K> the type of keys maintained by this map
233   * @param <V> the type of mapped values
234   */
235 < public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
235 > public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
236 >    implements ConcurrentMap<K,V>, Serializable {
237      private static final long serialVersionUID = 7249069246763182397L;
238  
239      /*
# Line 262 | Line 260 | public class ConcurrentHashMap<K,V> impl
260       * because they have negative hash fields and null key and value
261       * fields. (These special nodes are either uncommon or transient,
262       * so the impact of carrying around some unused fields is
263 <     * insignficant.)
263 >     * insignificant.)
264       *
265       * The table is lazily initialized to a power-of-two size upon the
266       * first insertion.  Each bin in the table normally contains a
# Line 270 | Line 268 | public class ConcurrentHashMap<K,V> impl
268       * Table accesses require volatile/atomic reads, writes, and
269       * CASes.  Because there is no other way to arrange this without
270       * adding further indirections, we use intrinsics
271 <     * (sun.misc.Unsafe) operations.
271 >     * (jdk.internal.misc.Unsafe) operations.
272       *
273       * We use the top (sign) bit of Node hash fields for control
274       * purposes -- it is available anyway because of addressing
# Line 343 | Line 341 | public class ConcurrentHashMap<K,V> impl
341       * The table is resized when occupancy exceeds a percentage
342       * threshold (nominally, 0.75, but see below).  Any thread
343       * noticing an overfull bin may assist in resizing after the
344 <     * initiating thread allocates and sets up the replacement
345 <     * array. However, rather than stalling, these other threads may
346 <     * proceed with insertions etc.  The use of TreeBins shields us
347 <     * from the worst case effects of overfilling while resizes are in
344 >     * initiating thread allocates and sets up the replacement array.
345 >     * However, rather than stalling, these other threads may proceed
346 >     * with insertions etc.  The use of TreeBins shields us from the
347 >     * worst case effects of overfilling while resizes are in
348       * progress.  Resizing proceeds by transferring bins, one by one,
349 <     * from the table to the next table. To enable concurrency, the
350 <     * next table must be (incrementally) prefilled with place-holders
351 <     * serving as reverse forwarders to the old table.  Because we are
349 >     * from the table to the next table. However, threads claim small
350 >     * blocks of indices to transfer (via field transferIndex) before
351 >     * doing so, reducing contention.  A generation stamp in field
352 >     * sizeCtl ensures that resizings do not overlap. Because we are
353       * using power-of-two expansion, the elements from each bin must
354       * either stay at same index, or move with a power of two
355       * offset. We eliminate unnecessary node creation by catching
# Line 371 | Line 370 | public class ConcurrentHashMap<K,V> impl
370       * locks, average aggregate waits become shorter as resizing
371       * progresses.  The transfer operation must also ensure that all
372       * accessible bins in both the old and new table are usable by any
373 <     * traversal.  This is arranged by proceeding from the last bin
374 <     * (table.length - 1) up towards the first.  Upon seeing a
375 <     * forwarding node, traversals (see class Traverser) arrange to
376 <     * move to the new table without revisiting nodes.  However, to
377 <     * ensure that no intervening nodes are skipped, bin splitting can
378 <     * only begin after the associated reverse-forwarders are in
379 <     * place.
373 >     * traversal.  This is arranged in part by proceeding from the
374 >     * last bin (table.length - 1) up towards the first.  Upon seeing
375 >     * a forwarding node, traversals (see class Traverser) arrange to
376 >     * move to the new table without revisiting nodes.  To ensure that
377 >     * no intervening nodes are skipped even when moved out of order,
378 >     * a stack (see class TableStack) is created on first encounter of
379 >     * a forwarding node during a traversal, to maintain its place if
380 >     * later processing the current table. The need for these
381 >     * save/restore mechanics is relatively rare, but when one
382 >     * forwarding node is encountered, typically many more will be.
383 >     * So Traversers use a simple caching scheme to avoid creating so
384 >     * many new TableStack nodes. (Thanks to Peter Levart for
385 >     * suggesting use of a stack here.)
386       *
387       * The traversal scheme also applies to partial traversals of
388       * ranges of bins (via an alternate Traverser constructor)
# Line 409 | Line 414 | public class ConcurrentHashMap<K,V> impl
414       * related operations (which is the main reason we cannot use
415       * existing collections such as TreeMaps). TreeBins contain
416       * Comparable elements, but may contain others, as well as
417 <     * elements that are Comparable but not necessarily Comparable
418 <     * for the same T, so we cannot invoke compareTo among them. To
419 <     * handle this, the tree is ordered primarily by hash value, then
420 <     * by Comparable.compareTo order if applicable.  On lookup at a
421 <     * node, if elements are not comparable or compare as 0 then both
422 <     * left and right children may need to be searched in the case of
423 <     * tied hash values. (This corresponds to the full list search
424 <     * that would be necessary if all elements were non-Comparable and
425 <     * had tied hashes.)  The red-black balancing code is updated from
426 <     * pre-jdk-collections
417 >     * elements that are Comparable but not necessarily Comparable for
418 >     * the same T, so we cannot invoke compareTo among them. To handle
419 >     * this, the tree is ordered primarily by hash value, then by
420 >     * Comparable.compareTo order if applicable.  On lookup at a node,
421 >     * if elements are not comparable or compare as 0 then both left
422 >     * and right children may need to be searched in the case of tied
423 >     * hash values. (This corresponds to the full list search that
424 >     * would be necessary if all elements were non-Comparable and had
425 >     * tied hashes.) On insertion, to keep a total ordering (or as
426 >     * close as is required here) across rebalancings, we compare
427 >     * classes and identityHashCodes as tie-breakers. The red-black
428 >     * balancing code is updated from pre-jdk-collections
429       * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
430       * based in turn on Cormen, Leiserson, and Rivest "Introduction to
431       * Algorithms" (CLR).
432       *
433       * TreeBins also require an additional locking mechanism.  While
434       * list traversal is always possible by readers even during
435 <     * updates, tree traversal is not, mainly beause of tree-rotations
435 >     * updates, tree traversal is not, mainly because of tree-rotations
436       * that may change the root node and/or its linkages.  TreeBins
437       * include a simple read-write lock mechanism parasitic on the
438       * main bin-synchronization strategy: Structural adjustments
# Line 441 | Line 448 | public class ConcurrentHashMap<K,V> impl
448       *
449       * Maintaining API and serialization compatibility with previous
450       * versions of this class introduces several oddities. Mainly: We
451 <     * leave untouched but unused constructor arguments refering to
451 >     * leave untouched but unused constructor arguments referring to
452       * concurrencyLevel. We accept a loadFactor constructor argument,
453       * but apply it only to initial table capacity (which is the only
454       * time that we can guarantee to honor it.) We also declare an
455       * unused "Segment" class that is instantiated in minimal form
456       * only when serializing.
457       *
458 +     * Also, solely for compatibility with previous versions of this
459 +     * class, it extends AbstractMap, even though all of its methods
460 +     * are overridden, so it is just useless baggage.
461 +     *
462       * This file is organized to make things a little easier to follow
463       * while reading than they might otherwise: First the main static
464       * declarations and utilities, then fields, then main public
# Line 528 | Line 539 | public class ConcurrentHashMap<K,V> impl
539       */
540      private static final int MIN_TRANSFER_STRIDE = 16;
541  
542 +    /**
543 +     * The number of bits used for generation stamp in sizeCtl.
544 +     * Must be at least 6 for 32bit arrays.
545 +     */
546 +    private static final int RESIZE_STAMP_BITS = 16;
547 +
548 +    /**
549 +     * The maximum number of threads that can help resize.
550 +     * Must fit in 32 - RESIZE_STAMP_BITS bits.
551 +     */
552 +    private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
553 +
554 +    /**
555 +     * The bit shift for recording size stamp in sizeCtl.
556 +     */
557 +    private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
558 +
559      /*
560       * Encodings for Node hash fields. See above for explanation.
561       */
562 <    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
563 <    static final int TREEBIN   = 0x80000000; // hash for heads of treea
564 <    static final int RESERVED  = 0x80000001; // hash for transient reservations
562 >    static final int MOVED     = -1; // hash for forwarding nodes
563 >    static final int TREEBIN   = -2; // hash for roots of trees
564 >    static final int RESERVED  = -3; // hash for transient reservations
565      static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
566  
567      /** Number of CPUS, to place bounds on some sizings */
568      static final int NCPU = Runtime.getRuntime().availableProcessors();
569  
570 <    /** For serialization compatibility. */
570 >    /**
571 >     * Serialized pseudo-fields, provided only for jdk7 compatibility.
572 >     * @serialField segments Segment[]
573 >     *   The segments, each of which is a specialized hash table.
574 >     * @serialField segmentMask int
575 >     *   Mask value for indexing into segments. The upper bits of a
576 >     *   key's hash code are used to choose the segment.
577 >     * @serialField segmentShift int
578 >     *   Shift value for indexing within segments.
579 >     */
580      private static final ObjectStreamField[] serialPersistentFields = {
581          new ObjectStreamField("segments", Segment[].class),
582          new ObjectStreamField("segmentMask", Integer.TYPE),
583 <        new ObjectStreamField("segmentShift", Integer.TYPE)
583 >        new ObjectStreamField("segmentShift", Integer.TYPE),
584      };
585  
586      /* ---------------- Nodes -------------- */
# Line 552 | Line 589 | public class ConcurrentHashMap<K,V> impl
589       * Key-value entry.  This class is never exported out as a
590       * user-mutable Map.Entry (i.e., one supporting setValue; see
591       * MapEntry below), but can be used for read-only traversals used
592 <     * in bulk tasks.  Subclasses of Node with a negativehash field
592 >     * in bulk tasks.  Subclasses of Node with a negative hash field
593       * are special, and contain null keys and values (but are never
594       * exported).  Otherwise, keys and vals are never null.
595       */
# Line 560 | Line 597 | public class ConcurrentHashMap<K,V> impl
597          final int hash;
598          final K key;
599          volatile V val;
600 <        Node<K,V> next;
600 >        volatile Node<K,V> next;
601  
602 <        Node(int hash, K key, V val, Node<K,V> next) {
602 >        Node(int hash, K key, V val) {
603              this.hash = hash;
604              this.key = key;
605              this.val = val;
606 +        }
607 +
608 +        Node(int hash, K key, V val, Node<K,V> next) {
609 +            this(hash, key, val);
610              this.next = next;
611          }
612  
613 <        public final K getKey()       { return key; }
614 <        public final V getValue()     { return val; }
615 <        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
616 <        public final String toString(){ return key + "=" + val; }
613 >        public final K getKey()     { return key; }
614 >        public final V getValue()   { return val; }
615 >        public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
616 >        public final String toString() {
617 >            return Helpers.mapEntryToString(key, val);
618 >        }
619          public final V setValue(V value) {
620              throw new UnsupportedOperationException();
621          }
# Line 630 | Line 673 | public class ConcurrentHashMap<K,V> impl
673       * See Hackers Delight, sec 3.2
674       */
675      private static final int tableSizeFor(int c) {
676 <        int n = c - 1;
634 <        n |= n >>> 1;
635 <        n |= n >>> 2;
636 <        n |= n >>> 4;
637 <        n |= n >>> 8;
638 <        n |= n >>> 16;
676 >        int n = -1 >>> Integer.numberOfLeadingZeros(c - 1);
677          return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
678      }
679  
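The replacement body above computes the next power of two in a single step; a standalone sketch with sample results (MAXIMUM_CAPACITY is 1 << 30 in this class):

static int tableSizeFor(int c) {
    int n = -1 >>> Integer.numberOfLeadingZeros(c - 1);
    return (n < 0) ? 1 : (n >= (1 << 30)) ? (1 << 30) : n + 1;
}
// tableSizeFor(1) == 1, tableSizeFor(17) == 32, tableSizeFor(32) == 32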
# Line 645 | Line 683 | public class ConcurrentHashMap<K,V> impl
683       */
684      static Class<?> comparableClassFor(Object x) {
685          if (x instanceof Comparable) {
686 <            Class<?> c; Type[] ts, as; Type t; ParameterizedType p;
686 >            Class<?> c; Type[] ts, as; ParameterizedType p;
687              if ((c = x.getClass()) == String.class) // bypass checks
688                  return c;
689              if ((ts = c.getGenericInterfaces()) != null) {
690 <                for (int i = 0; i < ts.length; ++i) {
691 <                    if (((t = ts[i]) instanceof ParameterizedType) &&
690 >                for (Type t : ts) {
691 >                    if ((t instanceof ParameterizedType) &&
692                          ((p = (ParameterizedType)t).getRawType() ==
693                           Comparable.class) &&
694                          (as = p.getActualTypeArguments()) != null &&
# Line 675 | Line 713 | public class ConcurrentHashMap<K,V> impl
713      /* ---------------- Table element access -------------- */
714  
715      /*
716 <     * Volatile access methods are used for table elements as well as
716 >     * Atomic access methods are used for table elements as well as
717       * elements of in-progress next table while resizing.  All uses of
718       * the tab arguments must be null checked by callers.  All callers
719       * also paranoically precheck that tab's length is not zero (or an
# Line 685 | Line 723 | public class ConcurrentHashMap<K,V> impl
723       * errors by users, these checks must operate on local variables,
724       * which accounts for some odd-looking inline assignments below.
725       * Note that calls to setTabAt always occur within locked regions,
726 <     * and so do not need full volatile semantics, but still require
689 <     * ordering to maintain concurrent readability.
726 >     * and so require only release ordering.
727       */
728  
729      @SuppressWarnings("unchecked")
730      static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
731 <        return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
731 >        return (Node<K,V>)U.getObjectAcquire(tab, ((long)i << ASHIFT) + ABASE);
732      }
733  
734      static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
735                                          Node<K,V> c, Node<K,V> v) {
736 <        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
736 >        return U.compareAndSetObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
737      }
738  
739      static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
740 <        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
740 >        U.putObjectRelease(tab, ((long)i << ASHIFT) + ABASE, v);
741      }
742  
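Outside the JDK, where jdk.internal.misc.Unsafe is not accessible, the same acquire/release array access can be sketched with a VarHandle; this is an illustrative equivalent, not what this file uses:

import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;

class TabAccess {
    static final VarHandle AA = MethodHandles.arrayElementVarHandle(Object[].class);
    static Object tabAt(Object[] tab, int i) {
        return AA.getAcquire(tab, i);            // acquire read, like getObjectAcquire above
    }
    static boolean casTabAt(Object[] tab, int i, Object c, Object v) {
        return AA.compareAndSet(tab, i, c, v);   // full CAS, like compareAndSetObject above
    }
    static void setTabAt(Object[] tab, int i, Object v) {
        AA.setRelease(tab, i, v);                // release write, like putObjectRelease above
    }
}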
743      /* ---------------- Fields -------------- */
# Line 739 | Line 776 | public class ConcurrentHashMap<K,V> impl
776      private transient volatile int transferIndex;
777  
778      /**
742     * The least available table index to split while resizing.
743     */
744    private transient volatile int transferOrigin;
745
746    /**
779       * Spinlock (locked via CAS) used when resizing and/or creating CounterCells.
780       */
781      private transient volatile int cellsBusy;
# Line 778 | Line 810 | public class ConcurrentHashMap<K,V> impl
810       * elements is negative
811       */
812      public ConcurrentHashMap(int initialCapacity) {
813 <        if (initialCapacity < 0)
782 <            throw new IllegalArgumentException();
783 <        int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
784 <                   MAXIMUM_CAPACITY :
785 <                   tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
786 <        this.sizeCtl = cap;
813 >        this(initialCapacity, LOAD_FACTOR, 1);
814      }
815  
816      /**
# Line 817 | Line 844 | public class ConcurrentHashMap<K,V> impl
844  
845      /**
846       * Creates a new, empty map with an initial table size based on
847 <     * the given number of elements ({@code initialCapacity}), table
848 <     * density ({@code loadFactor}), and number of concurrently
847 >     * the given number of elements ({@code initialCapacity}), initial
848 >     * table density ({@code loadFactor}), and number of concurrently
849       * updating threads ({@code concurrencyLevel}).
850       *
851       * @param initialCapacity the initial capacity. The implementation
# Line 956 | Line 983 | public class ConcurrentHashMap<K,V> impl
983          int hash = spread(key.hashCode());
984          int binCount = 0;
985          for (Node<K,V>[] tab = table;;) {
986 <            Node<K,V> f; int n, i, fh;
986 >            Node<K,V> f; int n, i, fh; K fk; V fv;
987              if (tab == null || (n = tab.length) == 0)
988                  tab = initTable();
989              else if ((f = tabAt(tab, i = (n - 1) & hash)) == null) {
990 <                if (casTabAt(tab, i, null,
964 <                             new Node<K,V>(hash, key, value, null)))
990 >                if (casTabAt(tab, i, null, new Node<K,V>(hash, key, value)))
991                      break;                   // no lock when adding to empty bin
992              }
993              else if ((fh = f.hash) == MOVED)
994                  tab = helpTransfer(tab, f);
995 +            else if (onlyIfAbsent // check first node without acquiring lock
996 +                     && fh == hash
997 +                     && ((fk = f.key) == key || (fk != null && key.equals(fk)))
998 +                     && (fv = f.val) != null)
999 +                return fv;
1000              else {
1001                  V oldVal = null;
1002                  synchronized (f) {
# Line 984 | Line 1015 | public class ConcurrentHashMap<K,V> impl
1015                                  }
1016                                  Node<K,V> pred = e;
1017                                  if ((e = e.next) == null) {
1018 <                                    pred.next = new Node<K,V>(hash, key,
988 <                                                              value, null);
1018 >                                    pred.next = new Node<K,V>(hash, key, value);
1019                                      break;
1020                                  }
1021                              }
# Line 1000 | Line 1030 | public class ConcurrentHashMap<K,V> impl
1030                                      p.val = value;
1031                              }
1032                          }
1033 +                        else if (f instanceof ReservationNode)
1034 +                            throw new IllegalStateException("Recursive update");
1035                      }
1036                  }
1037                  if (binCount != 0) {
# Line 1102 | Line 1134 | public class ConcurrentHashMap<K,V> impl
1134                                  }
1135                              }
1136                          }
1137 +                        else if (f instanceof ReservationNode)
1138 +                            throw new IllegalStateException("Recursive update");
1139                      }
1140                  }
1141                  if (validated) {
# Line 1162 | Line 1196 | public class ConcurrentHashMap<K,V> impl
1196       * operations.  It does not support the {@code add} or
1197       * {@code addAll} operations.
1198       *
1199 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1200 <     * that will never throw {@link ConcurrentModificationException},
1201 <     * and guarantees to traverse elements as they existed upon
1202 <     * construction of the iterator, and may (but is not guaranteed to)
1203 <     * reflect any modifications subsequent to construction.
1199 >     * <p>The view's iterators and spliterators are
1200 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1201 >     *
1202 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1203 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1204       *
1205       * @return the set view
1206       */
1207      public KeySetView<K,V> keySet() {
1208          KeySetView<K,V> ks;
1209 <        return (ks = keySet) != null ? ks : (keySet = new KeySetView<K,V>(this, null));
1209 >        if ((ks = keySet) != null) return ks;
1210 >        return keySet = new KeySetView<K,V>(this, null);
1211      }
1212  
1213      /**
# Line 1185 | Line 1220 | public class ConcurrentHashMap<K,V> impl
1220       * {@code retainAll}, and {@code clear} operations.  It does not
1221       * support the {@code add} or {@code addAll} operations.
1222       *
1223 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1224 <     * that will never throw {@link ConcurrentModificationException},
1225 <     * and guarantees to traverse elements as they existed upon
1226 <     * construction of the iterator, and may (but is not guaranteed to)
1227 <     * reflect any modifications subsequent to construction.
1223 >     * <p>The view's iterators and spliterators are
1224 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1225 >     *
1226 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}
1227 >     * and {@link Spliterator#NONNULL}.
1228       *
1229       * @return the collection view
1230       */
1231      public Collection<V> values() {
1232          ValuesView<K,V> vs;
1233 <        return (vs = values) != null ? vs : (values = new ValuesView<K,V>(this));
1233 >        if ((vs = values) != null) return vs;
1234 >        return values = new ValuesView<K,V>(this);
1235      }
1236  
1237      /**
# Line 1207 | Line 1243 | public class ConcurrentHashMap<K,V> impl
1243       * {@code removeAll}, {@code retainAll}, and {@code clear}
1244       * operations.
1245       *
1246 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1247 <     * that will never throw {@link ConcurrentModificationException},
1248 <     * and guarantees to traverse elements as they existed upon
1249 <     * construction of the iterator, and may (but is not guaranteed to)
1250 <     * reflect any modifications subsequent to construction.
1246 >     * <p>The view's iterators and spliterators are
1247 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1248 >     *
1249 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1250 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1251       *
1252       * @return the set view
1253       */
1254      public Set<Map.Entry<K,V>> entrySet() {
1255          EntrySetView<K,V> es;
1256 <        return (es = entrySet) != null ? es : (entrySet = new EntrySetView<K,V>(this));
1256 >        if ((es = entrySet) != null) return es;
1257 >        return entrySet = new EntrySetView<K,V>(this);
1258      }
1259  
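An illustrative check of the spliterator characteristics these view javadocs now document (the map contents are arbitrary):

import java.util.Map;
import java.util.Spliterator;
import java.util.concurrent.ConcurrentHashMap;

class ViewSpliteratorDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> m = new ConcurrentHashMap<>();
        m.put("a", 1);
        Spliterator<Map.Entry<String,Integer>> sp = m.entrySet().spliterator();
        System.out.println(sp.hasCharacteristics(Spliterator.CONCURRENT)); // true
        System.out.println(sp.hasCharacteristics(Spliterator.DISTINCT));   // true
        System.out.println(sp.hasCharacteristics(Spliterator.NONNULL));    // true
    }
}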
1260      /**
# Line 1309 | Line 1346 | public class ConcurrentHashMap<K,V> impl
1346  
1347      /**
1348       * Stripped-down version of helper class used in previous version,
1349 <     * declared for the sake of serialization compatibility
1349 >     * declared for the sake of serialization compatibility.
1350       */
1351      static class Segment<K,V> extends ReentrantLock implements Serializable {
1352          private static final long serialVersionUID = 2249069246763182397L;
# Line 1318 | Line 1355 | public class ConcurrentHashMap<K,V> impl
1355      }
1356  
1357      /**
1358 <     * Saves the state of the {@code ConcurrentHashMap} instance to a
1359 <     * stream (i.e., serializes it).
1358 >     * Saves this map to a stream (that is, serializes it).
1359 >     *
1360       * @param s the stream
1361 +     * @throws java.io.IOException if an I/O error occurs
1362       * @serialData
1363 <     * the key (Object) and value (Object)
1364 <     * for each key-value mapping, followed by a null pair.
1363 >     * the serialized fields, followed by the key (Object) and value
1364 >     * (Object) for each key-value mapping, followed by a null pair.
1365       * The key-value mappings are emitted in no particular order.
1366       */
1367      private void writeObject(java.io.ObjectOutputStream s)
# Line 1338 | Line 1376 | public class ConcurrentHashMap<K,V> impl
1376          }
1377          int segmentShift = 32 - sshift;
1378          int segmentMask = ssize - 1;
1379 <        @SuppressWarnings("unchecked") Segment<K,V>[] segments = (Segment<K,V>[])
1379 >        @SuppressWarnings("unchecked")
1380 >        Segment<K,V>[] segments = (Segment<K,V>[])
1381              new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
1382          for (int i = 0; i < segments.length; ++i)
1383              segments[i] = new Segment<K,V>(LOAD_FACTOR);
1384 <        s.putFields().put("segments", segments);
1385 <        s.putFields().put("segmentShift", segmentShift);
1386 <        s.putFields().put("segmentMask", segmentMask);
1384 >        java.io.ObjectOutputStream.PutField streamFields = s.putFields();
1385 >        streamFields.put("segments", segments);
1386 >        streamFields.put("segmentShift", segmentShift);
1387 >        streamFields.put("segmentMask", segmentMask);
1388          s.writeFields();
1389  
1390          Node<K,V>[] t;
# Line 1357 | Line 1397 | public class ConcurrentHashMap<K,V> impl
1397          }
1398          s.writeObject(null);
1399          s.writeObject(null);
1360        segments = null; // throw away
1400      }
1401  
1402      /**
1403 <     * Reconstitutes the instance from a stream (that is, deserializes it).
1403 >     * Reconstitutes this map from a stream (that is, deserializes it).
1404       * @param s the stream
1405 +     * @throws ClassNotFoundException if the class of a serialized object
1406 +     *         could not be found
1407 +     * @throws java.io.IOException if an I/O error occurs
1408       */
1409      private void readObject(java.io.ObjectInputStream s)
1410          throws java.io.IOException, ClassNotFoundException {
# Line 1378 | Line 1420 | public class ConcurrentHashMap<K,V> impl
1420          long size = 0L;
1421          Node<K,V> p = null;
1422          for (;;) {
1423 <            @SuppressWarnings("unchecked") K k = (K) s.readObject();
1424 <            @SuppressWarnings("unchecked") V v = (V) s.readObject();
1423 >            @SuppressWarnings("unchecked")
1424 >            K k = (K) s.readObject();
1425 >            @SuppressWarnings("unchecked")
1426 >            V v = (V) s.readObject();
1427              if (k != null && v != null) {
1428                  p = new Node<K,V>(spread(k.hashCode()), k, v, p);
1429                  ++size;
# Line 1390 | Line 1434 | public class ConcurrentHashMap<K,V> impl
1434          if (size == 0L)
1435              sizeCtl = 0;
1436          else {
1437 <            int n;
1438 <            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
1439 <                n = MAXIMUM_CAPACITY;
1440 <            else {
1441 <                int sz = (int)size;
1398 <                n = tableSizeFor(sz + (sz >>> 1) + 1);
1399 <            }
1400 <            @SuppressWarnings({"rawtypes","unchecked"})
1401 <                Node<K,V>[] tab = (Node<K,V>[])new Node[n];
1437 >            long ts = (long)(1.0 + size / LOAD_FACTOR);
1438 >            int n = (ts >= (long)MAXIMUM_CAPACITY) ?
1439 >                MAXIMUM_CAPACITY : tableSizeFor((int)ts);
1440 >            @SuppressWarnings("unchecked")
1441 >            Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
1442              int mask = n - 1;
1443              long added = 0L;
1444              while (p != null) {
# Line 1556 | Line 1596 | public class ConcurrentHashMap<K,V> impl
1596      }
1597  
1598      /**
1599 +     * Helper method for EntrySetView.removeIf.
1600 +     */
1601 +    boolean removeEntryIf(Predicate<? super Entry<K,V>> function) {
1602 +        if (function == null) throw new NullPointerException();
1603 +        Node<K,V>[] t;
1604 +        boolean removed = false;
1605 +        if ((t = table) != null) {
1606 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1607 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1608 +                K k = p.key;
1609 +                V v = p.val;
1610 +                Map.Entry<K,V> e = new AbstractMap.SimpleImmutableEntry<>(k, v);
1611 +                if (function.test(e) && replaceNode(k, null, v) != null)
1612 +                    removed = true;
1613 +            }
1614 +        }
1615 +        return removed;
1616 +    }
1617 +
1618 +    /**
1619 +     * Helper method for ValuesView.removeIf.
1620 +     */
1621 +    boolean removeValueIf(Predicate<? super V> function) {
1622 +        if (function == null) throw new NullPointerException();
1623 +        Node<K,V>[] t;
1624 +        boolean removed = false;
1625 +        if ((t = table) != null) {
1626 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1627 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1628 +                K k = p.key;
1629 +                V v = p.val;
1630 +                if (function.test(v) && replaceNode(k, null, v) != null)
1631 +                    removed = true;
1632 +            }
1633 +        }
1634 +        return removed;
1635 +    }
1636 +
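The two helpers above back the removeIf overrides of the value and entry views; an illustrative caller-side use (the keys and predicates are arbitrary):

import java.util.concurrent.ConcurrentHashMap;

class ViewRemoveIfDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> m = new ConcurrentHashMap<>();
        m.put("a", 1);
        m.put("b", 2);
        m.values().removeIf(v -> v > 1);                    // routed to removeValueIf per its doc comment
        m.entrySet().removeIf(e -> "a".equals(e.getKey())); // routed to removeEntryIf per its doc comment
        System.out.println(m); // {} once both removals apply
    }
}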
1637 +    /**
1638       * If the specified key is not already associated with a value,
1639       * attempts to compute its value using the given mapping function
1640       * and enters it into this map unless {@code null}.  The entire
# Line 1584 | Line 1663 | public class ConcurrentHashMap<K,V> impl
1663          V val = null;
1664          int binCount = 0;
1665          for (Node<K,V>[] tab = table;;) {
1666 <            Node<K,V> f; int n, i, fh;
1666 >            Node<K,V> f; int n, i, fh; K fk; V fv;
1667              if (tab == null || (n = tab.length) == 0)
1668                  tab = initTable();
1669              else if ((f = tabAt(tab, i = (n - 1) & h)) == null) {
# Line 1595 | Line 1674 | public class ConcurrentHashMap<K,V> impl
1674                          Node<K,V> node = null;
1675                          try {
1676                              if ((val = mappingFunction.apply(key)) != null)
1677 <                                node = new Node<K,V>(h, key, val, null);
1677 >                                node = new Node<K,V>(h, key, val);
1678                          } finally {
1679                              setTabAt(tab, i, node);
1680                          }
# Line 1606 | Line 1685 | public class ConcurrentHashMap<K,V> impl
1685              }
1686              else if ((fh = f.hash) == MOVED)
1687                  tab = helpTransfer(tab, f);
1688 +            else if (fh == h    // check first node without acquiring lock
1689 +                     && ((fk = f.key) == key || (fk != null && key.equals(fk)))
1690 +                     && (fv = f.val) != null)
1691 +                return fv;
1692              else {
1693                  boolean added = false;
1694                  synchronized (f) {
# Line 1613 | Line 1696 | public class ConcurrentHashMap<K,V> impl
1696                          if (fh >= 0) {
1697                              binCount = 1;
1698                              for (Node<K,V> e = f;; ++binCount) {
1699 <                                K ek; V ev;
1699 >                                K ek;
1700                                  if (e.hash == h &&
1701                                      ((ek = e.key) == key ||
1702                                       (ek != null && key.equals(ek)))) {
# Line 1623 | Line 1706 | public class ConcurrentHashMap<K,V> impl
1706                                  Node<K,V> pred = e;
1707                                  if ((e = e.next) == null) {
1708                                      if ((val = mappingFunction.apply(key)) != null) {
1709 +                                        if (pred.next != null)
1710 +                                            throw new IllegalStateException("Recursive update");
1711                                          added = true;
1712 <                                        pred.next = new Node<K,V>(h, key, val, null);
1712 >                                        pred.next = new Node<K,V>(h, key, val);
1713                                      }
1714                                      break;
1715                                  }
# Line 1642 | Line 1727 | public class ConcurrentHashMap<K,V> impl
1727                                  t.putTreeVal(h, key, val);
1728                              }
1729                          }
1730 +                        else if (f instanceof ReservationNode)
1731 +                            throw new IllegalStateException("Recursive update");
1732                      }
1733                  }
1734                  if (binCount != 0) {
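A hedged illustration of the fail-fast behavior added in this method: a mapping function that updates the same map from within computeIfAbsent can now surface IllegalStateException("Recursive update") instead of corrupting the bin; whether it actually triggers depends on the two keys landing in the same bin, so this is a sketch rather than a guaranteed outcome:

ConcurrentHashMap<String,Integer> m = new ConcurrentHashMap<>();
try {
    // nested update of the same map; throws only if "x" and "y" share a bin
    m.computeIfAbsent("x", k -> { m.computeIfAbsent("y", k2 -> 2); return 1; });
} catch (IllegalStateException recursiveUpdate) {
    // raised by the ReservationNode / pred.next checks shown in this diff
}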
# Line 1737 | Line 1824 | public class ConcurrentHashMap<K,V> impl
1824                                  }
1825                              }
1826                          }
1827 +                        else if (f instanceof ReservationNode)
1828 +                            throw new IllegalStateException("Recursive update");
1829                      }
1830                  }
1831                  if (binCount != 0)
# Line 1789 | Line 1878 | public class ConcurrentHashMap<K,V> impl
1878                          try {
1879                              if ((val = remappingFunction.apply(key, null)) != null) {
1880                                  delta = 1;
1881 <                                node = new Node<K,V>(h, key, val, null);
1881 >                                node = new Node<K,V>(h, key, val);
1882                              }
1883                          } finally {
1884                              setTabAt(tab, i, node);
# Line 1828 | Line 1917 | public class ConcurrentHashMap<K,V> impl
1917                                  if ((e = e.next) == null) {
1918                                      val = remappingFunction.apply(key, null);
1919                                      if (val != null) {
1920 +                                        if (pred.next != null)
1921 +                                            throw new IllegalStateException("Recursive update");
1922                                          delta = 1;
1923 <                                        pred.next =
1833 <                                            new Node<K,V>(h, key, val, null);
1923 >                                        pred.next = new Node<K,V>(h, key, val);
1924                                      }
1925                                      break;
1926                                  }
# Line 1860 | Line 1950 | public class ConcurrentHashMap<K,V> impl
1950                                      setTabAt(tab, i, untreeify(t.first));
1951                              }
1952                          }
1953 +                        else if (f instanceof ReservationNode)
1954 +                            throw new IllegalStateException("Recursive update");
1955                      }
1956                  }
1957                  if (binCount != 0) {
# Line 1906 | Line 1998 | public class ConcurrentHashMap<K,V> impl
1998              if (tab == null || (n = tab.length) == 0)
1999                  tab = initTable();
2000              else if ((f = tabAt(tab, i = (n - 1) & h)) == null) {
2001 <                if (casTabAt(tab, i, null, new Node<K,V>(h, key, value, null))) {
2001 >                if (casTabAt(tab, i, null, new Node<K,V>(h, key, value))) {
2002                      delta = 1;
2003                      val = value;
2004                      break;
# Line 1941 | Line 2033 | public class ConcurrentHashMap<K,V> impl
2033                                  if ((e = e.next) == null) {
2034                                      delta = 1;
2035                                      val = value;
2036 <                                    pred.next =
1945 <                                        new Node<K,V>(h, key, val, null);
2036 >                                    pred.next = new Node<K,V>(h, key, val);
2037                                      break;
2038                                  }
2039                              }
# Line 1969 | Line 2060 | public class ConcurrentHashMap<K,V> impl
2060                                      setTabAt(tab, i, untreeify(t.first));
2061                              }
2062                          }
2063 +                        else if (f instanceof ReservationNode)
2064 +                            throw new IllegalStateException("Recursive update");
2065                      }
2066                  }
2067                  if (binCount != 0) {
# Line 1986 | Line 2079 | public class ConcurrentHashMap<K,V> impl
2079      // Hashtable legacy methods
2080  
2081      /**
2082 <     * Legacy method testing if some key maps into the specified value
2083 <     * in this table.  This method is identical in functionality to
2082 >     * Tests if some key maps into the specified value in this table.
2083 >     *
2084 >     * <p>Note that this method is identical in functionality to
2085       * {@link #containsValue(Object)}, and exists solely to ensure
2086       * full compatibility with class {@link java.util.Hashtable},
2087       * which supported this method prior to introduction of the
2088 <     * Java Collections framework.
2088 >     * Java Collections Framework.
2089       *
2090       * @param  value a value to search for
2091       * @return {@code true} if and only if some key maps to the
# Line 2000 | Line 2094 | public class ConcurrentHashMap<K,V> impl
2094       *         {@code false} otherwise
2095       * @throws NullPointerException if the specified value is null
2096       */
2097 <    @Deprecated public boolean contains(Object value) {
2097 >    public boolean contains(Object value) {
2098          return containsValue(value);
2099      }
2100  
# Line 2049 | Line 2143 | public class ConcurrentHashMap<K,V> impl
2143       * Creates a new {@link Set} backed by a ConcurrentHashMap
2144       * from the given type to {@code Boolean.TRUE}.
2145       *
2146 +     * @param <K> the element type of the returned set
2147       * @return the new set
2148       * @since 1.8
2149       */
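Illustrative use of the newKeySet factory documented above (the element values are arbitrary):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

class KeySetDemo {
    public static void main(String[] args) {
        Set<String> seen = ConcurrentHashMap.newKeySet(); // view over a map whose values are Boolean.TRUE
        seen.add("first");
        System.out.println(seen.contains("first"));       // true
    }
}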
# Line 2063 | Line 2158 | public class ConcurrentHashMap<K,V> impl
2158       *
2159       * @param initialCapacity The implementation performs internal
2160       * sizing to accommodate this many elements.
2161 +     * @param <K> the element type of the returned set
2162 +     * @return the new set
2163       * @throws IllegalArgumentException if the initial capacity of
2164       * elements is negative
2068     * @return the new set
2165       * @since 1.8
2166       */
2167      public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
# Line 2098 | Line 2194 | public class ConcurrentHashMap<K,V> impl
2194      static final class ForwardingNode<K,V> extends Node<K,V> {
2195          final Node<K,V>[] nextTable;
2196          ForwardingNode(Node<K,V>[] tab) {
2197 <            super(MOVED, null, null, null);
2197 >            super(MOVED, null, null);
2198              this.nextTable = tab;
2199          }
2200  
2201          Node<K,V> find(int h, Object k) {
2202 <            Node<K,V> e; int n;
2203 <            Node<K,V>[] tab = nextTable;
2204 <            if (k != null && tab != null && (n = tab.length) > 0 &&
2205 <                (e = tabAt(tab, (n - 1) & h)) != null) {
2206 <                do {
2202 >            // loop to avoid arbitrarily deep recursion on forwarding nodes
2203 >            outer: for (Node<K,V>[] tab = nextTable;;) {
2204 >                Node<K,V> e; int n;
2205 >                if (k == null || tab == null || (n = tab.length) == 0 ||
2206 >                    (e = tabAt(tab, (n - 1) & h)) == null)
2207 >                    return null;
2208 >                for (;;) {
2209                      int eh; K ek;
2210                      if ((eh = e.hash) == h &&
2211                          ((ek = e.key) == k || (ek != null && k.equals(ek))))
2212                          return e;
2213 <                    if (eh < 0)
2214 <                        return e.find(h, k);
2215 <                } while ((e = e.next) != null);
2213 >                    if (eh < 0) {
2214 >                        if (e instanceof ForwardingNode) {
2215 >                            tab = ((ForwardingNode<K,V>)e).nextTable;
2216 >                            continue outer;
2217 >                        }
2218 >                        else
2219 >                            return e.find(h, k);
2220 >                    }
2221 >                    if ((e = e.next) == null)
2222 >                        return null;
2223 >                }
2224              }
2119            return null;
2225          }
2226      }
2227  
2228      /**
2229 <     * A place-holder node used in computeIfAbsent and compute
2229 >     * A place-holder node used in computeIfAbsent and compute.
2230       */
2231      static final class ReservationNode<K,V> extends Node<K,V> {
2232          ReservationNode() {
2233 <            super(RESERVED, null, null, null);
2233 >            super(RESERVED, null, null);
2234          }
2235  
2236          Node<K,V> find(int h, Object k) {
# Line 2136 | Line 2241 | public class ConcurrentHashMap<K,V> impl
2241      /* ---------------- Table Initialization and Resizing -------------- */
2242  
2243      /**
2244 +     * Returns the stamp bits for resizing a table of size n.
2245 +     * Must be negative when shifted left by RESIZE_STAMP_SHIFT.
2246 +     */
2247 +    static final int resizeStamp(int n) {
2248 +        return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
2249 +    }
2250 +
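A worked sketch of the stamp encoding introduced here, using RESIZE_STAMP_BITS == 16 and RESIZE_STAMP_SHIFT == 16 as defined earlier in this diff (the table size of 16 is arbitrary):

int n = 16;
int rs = Integer.numberOfLeadingZeros(n) | (1 << 15); // 27 | 0x8000 = 0x801b
int sc = (rs << 16) + 2;  // 0x801b0002: negative, so a resize is in progress;
                          // the high 16 bits carry the stamp for this table size and
                          // the low 16 bits hold (number of resizing threads + 1),
                          // which helpers bump via the sc + 1 CASes shown later in this diff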
2251 +    /**
2252       * Initializes table, using the size recorded in sizeCtl.
2253       */
2254      private final Node<K,V>[] initTable() {
# Line 2143 | Line 2256 | public class ConcurrentHashMap<K,V> impl
2256          while ((tab = table) == null || tab.length == 0) {
2257              if ((sc = sizeCtl) < 0)
2258                  Thread.yield(); // lost initialization race; just spin
2259 <            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2259 >            else if (U.compareAndSetInt(this, SIZECTL, sc, -1)) {
2260                  try {
2261                      if ((tab = table) == null || tab.length == 0) {
2262                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2263 <                        @SuppressWarnings({"rawtypes","unchecked"})
2264 <                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2263 >                        @SuppressWarnings("unchecked")
2264 >                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2265                          table = tab = nt;
2266                          sc = n - (n >>> 2);
2267                      }
# Line 2172 | Line 2285 | public class ConcurrentHashMap<K,V> impl
2285       * @param check if <0, don't check resize, if <= 1 only check if uncontended
2286       */
2287      private final void addCount(long x, int check) {
2288 <        CounterCell[] as; long b, s;
2289 <        if ((as = counterCells) != null ||
2290 <            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
2291 <            CounterCell a; long v; int m;
2288 >        CounterCell[] cs; long b, s;
2289 >        if ((cs = counterCells) != null ||
2290 >            !U.compareAndSetLong(this, BASECOUNT, b = baseCount, s = b + x)) {
2291 >            CounterCell c; long v; int m;
2292              boolean uncontended = true;
2293 <            if (as == null || (m = as.length - 1) < 0 ||
2294 <                (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
2293 >            if (cs == null || (m = cs.length - 1) < 0 ||
2294 >                (c = cs[ThreadLocalRandom.getProbe() & m]) == null ||
2295                  !(uncontended =
2296 <                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
2296 >                  U.compareAndSetLong(c, CELLVALUE, v = c.value, v + x))) {
2297                  fullAddCount(x, uncontended);
2298                  return;
2299              }
# Line 2189 | Line 2302 | public class ConcurrentHashMap<K,V> impl
2302              s = sumCount();
2303          }
2304          if (check >= 0) {
2305 <            Node<K,V>[] tab, nt; int sc;
2305 >            Node<K,V>[] tab, nt; int n, sc;
2306              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
2307 <                   tab.length < MAXIMUM_CAPACITY) {
2307 >                   (n = tab.length) < MAXIMUM_CAPACITY) {
2308 >                int rs = resizeStamp(n);
2309                  if (sc < 0) {
2310 <                    if (sc == -1 || transferIndex <= transferOrigin ||
2311 <                        (nt = nextTable) == null)
2310 >                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2311 >                        sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
2312 >                        transferIndex <= 0)
2313                          break;
2314 <                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2314 >                    if (U.compareAndSetInt(this, SIZECTL, sc, sc + 1))
2315                          transfer(tab, nt);
2316                  }
2317 <                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
2317 >                else if (U.compareAndSetInt(this, SIZECTL, sc,
2318 >                                             (rs << RESIZE_STAMP_SHIFT) + 2))
2319                      transfer(tab, null);
2320                  s = sumCount();
2321              }
# Line 2211 | Line 2327 | public class ConcurrentHashMap<K,V> impl
2327       */
2328      final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
2329          Node<K,V>[] nextTab; int sc;
2330 <        if ((f instanceof ForwardingNode) &&
2330 >        if (tab != null && (f instanceof ForwardingNode) &&
2331              (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
2332 <            if (nextTab == nextTable && tab == table &&
2333 <                transferIndex > transferOrigin && (sc = sizeCtl) < -1 &&
2334 <                U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2335 <                transfer(tab, nextTab);
2332 >            int rs = resizeStamp(tab.length);
2333 >            while (nextTab == nextTable && table == tab &&
2334 >                   (sc = sizeCtl) < 0) {
2335 >                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2336 >                    sc == rs + MAX_RESIZERS || transferIndex <= 0)
2337 >                    break;
2338 >                if (U.compareAndSetInt(this, SIZECTL, sc, sc + 1)) {
2339 >                    transfer(tab, nextTab);
2340 >                    break;
2341 >                }
2342 >            }
2343              return nextTab;
2344          }
2345          return table;
# Line 2235 | Line 2358 | public class ConcurrentHashMap<K,V> impl
2358              Node<K,V>[] tab = table; int n;
2359              if (tab == null || (n = tab.length) == 0) {
2360                  n = (sc > c) ? sc : c;
2361 <                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2361 >                if (U.compareAndSetInt(this, SIZECTL, sc, -1)) {
2362                      try {
2363                          if (table == tab) {
2364 <                            @SuppressWarnings({"rawtypes","unchecked"})
2365 <                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2364 >                            @SuppressWarnings("unchecked")
2365 >                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2366                              table = nt;
2367                              sc = n - (n >>> 2);
2368                          }
# Line 2250 | Line 2373 | public class ConcurrentHashMap<K,V> impl
2373              }
2374              else if (c <= sc || n >= MAXIMUM_CAPACITY)
2375                  break;
2376 <            else if (tab == table &&
2377 <                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
2378 <                transfer(tab, null);
2376 >            else if (tab == table) {
2377 >                int rs = resizeStamp(n);
2378 >                if (U.compareAndSetInt(this, SIZECTL, sc,
2379 >                                        (rs << RESIZE_STAMP_SHIFT) + 2))
2380 >                    transfer(tab, null);
2381 >            }
2382          }
2383      }
2384  
# Line 2266 | Line 2392 | public class ConcurrentHashMap<K,V> impl
2392              stride = MIN_TRANSFER_STRIDE; // subdivide range
2393          if (nextTab == null) {            // initiating
2394              try {
2395 <                @SuppressWarnings({"rawtypes","unchecked"})
2396 <                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
2395 >                @SuppressWarnings("unchecked")
2396 >                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
2397                  nextTab = nt;
2398              } catch (Throwable ex) {      // try to cope with OOME
2399                  sizeCtl = Integer.MAX_VALUE;
2400                  return;
2401              }
2402              nextTable = nextTab;
2277            transferOrigin = n;
2403              transferIndex = n;
2279            ForwardingNode<K,V> rev = new ForwardingNode<K,V>(tab);
2280            for (int k = n; k > 0;) {    // progressively reveal ready slots
2281                int nextk = (k > stride) ? k - stride : 0;
2282                for (int m = nextk; m < k; ++m)
2283                    nextTab[m] = rev;
2284                for (int m = n + nextk; m < n + k; ++m)
2285                    nextTab[m] = rev;
2286                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2287            }
2404          }
2405          int nextn = nextTab.length;
2406          ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
2407          boolean advance = true;
2408 +        boolean finishing = false; // to ensure sweep before committing nextTab
2409          for (int i = 0, bound = 0;;) {
2410 <            int nextIndex, nextBound, fh; Node<K,V> f;
2410 >            Node<K,V> f; int fh;
2411              while (advance) {
2412 <                if (--i >= bound)
2412 >                int nextIndex, nextBound;
2413 >                if (--i >= bound || finishing)
2414                      advance = false;
2415 <                else if ((nextIndex = transferIndex) <= transferOrigin) {
2415 >                else if ((nextIndex = transferIndex) <= 0) {
2416                      i = -1;
2417                      advance = false;
2418                  }
2419 <                else if (U.compareAndSwapInt
2419 >                else if (U.compareAndSetInt
2420                           (this, TRANSFERINDEX, nextIndex,
2421                            nextBound = (nextIndex > stride ?
2422                                         nextIndex - stride : 0))) {
# Line 2308 | Line 2426 | public class ConcurrentHashMap<K,V> impl
2426                  }
2427              }
2428              if (i < 0 || i >= n || i + n >= nextn) {
2429 <                for (int sc;;) {
2430 <                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2431 <                        if (sc == -1) {
2432 <                            nextTable = null;
2433 <                            table = nextTab;
2434 <                            sizeCtl = (n << 1) - (n >>> 1);
2317 <                        }
2318 <                        return;
2319 <                    }
2429 >                int sc;
2430 >                if (finishing) {
2431 >                    nextTable = null;
2432 >                    table = nextTab;
2433 >                    sizeCtl = (n << 1) - (n >>> 1);
2434 >                    return;
2435                  }
2436 <            }
2437 <            else if ((f = tabAt(tab, i)) == null) {
2438 <                if (casTabAt(tab, i, null, fwd)) {
2439 <                    setTabAt(nextTab, i, null);
2440 <                    setTabAt(nextTab, i + n, null);
2326 <                    advance = true;
2436 >                if (U.compareAndSetInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
2437 >                    if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
2438 >                        return;
2439 >                    finishing = advance = true;
2440 >                    i = n; // recheck before commit
2441                  }
2442              }
2443 +            else if ((f = tabAt(tab, i)) == null)
2444 +                advance = casTabAt(tab, i, null, fwd);
2445              else if ((fh = f.hash) == MOVED)
2446                  advance = true; // already processed
2447              else {
# Line 2357 | Line 2473 | public class ConcurrentHashMap<K,V> impl
2473                                  else
2474                                      hn = new Node<K,V>(ph, pk, pv, hn);
2475                              }
2476 +                            setTabAt(nextTab, i, ln);
2477 +                            setTabAt(nextTab, i + n, hn);
2478 +                            setTabAt(tab, i, fwd);
2479 +                            advance = true;
2480                          }
2481                          else if (f instanceof TreeBin) {
2482                              TreeBin<K,V> t = (TreeBin<K,V>)f;
# Line 2388 | Line 2508 | public class ConcurrentHashMap<K,V> impl
2508                                  (hc != 0) ? new TreeBin<K,V>(lo) : t;
2509                              hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
2510                                  (lc != 0) ? new TreeBin<K,V>(hi) : t;
2511 +                            setTabAt(nextTab, i, ln);
2512 +                            setTabAt(nextTab, i + n, hn);
2513 +                            setTabAt(tab, i, fwd);
2514 +                            advance = true;
2515                          }
2392                        else
2393                            ln = hn = null;
2394                        setTabAt(nextTab, i, ln);
2395                        setTabAt(nextTab, i + n, hn);
2396                        setTabAt(tab, i, fwd);
2397                        advance = true;
2516                      }
2517                  }
2518              }
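
[Editorial sketch] For each bin, the loop above partitions the old list into a "low" list that stays at index i and a "high" list that moves to i + n, keyed by the bit of the hash corresponding to the old capacity. A stripped-down sketch of that split; the Node type here is a hypothetical stand-in, and the real code additionally reuses an unchanged trailing run and publishes the results with setTabAt.

    final class BinSplitSketch {
        static final class Node {
            final int hash; final Object key, val; Node next;
            Node(int hash, Object key, Object val) {
                this.hash = hash; this.key = key; this.val = val;
            }
        }

        static void split(Node head, int i, int n, Node[] nextTab) {
            Node lo = null, hi = null;
            for (Node p = head; p != null; p = p.next) {
                Node copy = new Node(p.hash, p.key, p.val);
                if ((p.hash & n) == 0) { copy.next = lo; lo = copy; } // stays at index i
                else                   { copy.next = hi; hi = copy; } // moves to index i + n
            }
            nextTab[i]     = lo;   // the real loop then sets tab[i] to a ForwardingNode
            nextTab[i + n] = hi;   // so late readers are redirected to nextTab
        }
    }
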
# Line 2407 | Line 2525 | public class ConcurrentHashMap<K,V> impl
2525       * A padded cell for distributing counts.  Adapted from LongAdder
2526       * and Striped64.  See their internal docs for explanation.
2527       */
2528 <    @sun.misc.Contended static final class CounterCell {
2528 >    @jdk.internal.vm.annotation.Contended static final class CounterCell {
2529          volatile long value;
2530          CounterCell(long x) { value = x; }
2531      }
2532  
2533      final long sumCount() {
2534 <        CounterCell[] as = counterCells; CounterCell a;
2534 >        CounterCell[] cs = counterCells;
2535          long sum = baseCount;
2536 <        if (as != null) {
2537 <            for (int i = 0; i < as.length; ++i) {
2538 <                if ((a = as[i]) != null)
2539 <                    sum += a.value;
2422 <            }
2536 >        if (cs != null) {
2537 >            for (CounterCell c : cs)
2538 >                if (c != null)
2539 >                    sum += c.value;
2540          }
2541          return sum;
2542      }
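
[Editorial sketch] sumCount simply folds baseCount with the per-cell values. The striping idea, reduced to its essentials (class and field names below are invented; the real addCount also creates cells lazily and falls back to fullAddCount on contention):

    import java.util.concurrent.ThreadLocalRandom;
    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.atomic.AtomicLongArray;

    // LongAdder-style striped counter, heavily simplified.
    final class StripedCounterSketch {
        private final AtomicLong base = new AtomicLong();
        private final AtomicLongArray cells =
            new AtomicLongArray(Runtime.getRuntime().availableProcessors());

        void add(long x) {
            long b = base.get();
            if (!base.compareAndSet(b, b + x)) {            // contended: use a cell
                int i = ThreadLocalRandom.current().nextInt(cells.length());
                cells.addAndGet(i, x);                      // the real code retries a CAS here
            }
        }

        long sum() {                                        // mirrors sumCount(): base plus cells
            long s = base.get();
            for (int i = 0; i < cells.length(); i++)
                s += cells.get(i);
            return s;
        }
    }
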
# Line 2434 | Line 2551 | public class ConcurrentHashMap<K,V> impl
2551          }
2552          boolean collide = false;                // True if last slot nonempty
2553          for (;;) {
2554 <            CounterCell[] as; CounterCell a; int n; long v;
2555 <            if ((as = counterCells) != null && (n = as.length) > 0) {
2556 <                if ((a = as[(n - 1) & h]) == null) {
2554 >            CounterCell[] cs; CounterCell c; int n; long v;
2555 >            if ((cs = counterCells) != null && (n = cs.length) > 0) {
2556 >                if ((c = cs[(n - 1) & h]) == null) {
2557                      if (cellsBusy == 0) {            // Try to attach new Cell
2558                          CounterCell r = new CounterCell(x); // Optimistic create
2559                          if (cellsBusy == 0 &&
2560 <                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2560 >                            U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2561                              boolean created = false;
2562                              try {               // Recheck under lock
2563                                  CounterCell[] rs; int m, j;
# Line 2462 | Line 2579 | public class ConcurrentHashMap<K,V> impl
2579                  }
2580                  else if (!wasUncontended)       // CAS already known to fail
2581                      wasUncontended = true;      // Continue after rehash
2582 <                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2582 >                else if (U.compareAndSetLong(c, CELLVALUE, v = c.value, v + x))
2583                      break;
2584 <                else if (counterCells != as || n >= NCPU)
2584 >                else if (counterCells != cs || n >= NCPU)
2585                      collide = false;            // At max size or stale
2586                  else if (!collide)
2587                      collide = true;
2588                  else if (cellsBusy == 0 &&
2589 <                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2589 >                         U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2590                      try {
2591 <                        if (counterCells == as) {// Expand table unless stale
2592 <                            CounterCell[] rs = new CounterCell[n << 1];
2476 <                            for (int i = 0; i < n; ++i)
2477 <                                rs[i] = as[i];
2478 <                            counterCells = rs;
2479 <                        }
2591 >                        if (counterCells == cs) // Expand table unless stale
2592 >                            counterCells = Arrays.copyOf(cs, n << 1);
2593                      } finally {
2594                          cellsBusy = 0;
2595                      }
# Line 2485 | Line 2598 | public class ConcurrentHashMap<K,V> impl
2598                  }
2599                  h = ThreadLocalRandom.advanceProbe(h);
2600              }
2601 <            else if (cellsBusy == 0 && counterCells == as &&
2602 <                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2601 >            else if (cellsBusy == 0 && counterCells == cs &&
2602 >                     U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2603                  boolean init = false;
2604                  try {                           // Initialize table
2605 <                    if (counterCells == as) {
2605 >                    if (counterCells == cs) {
2606                          CounterCell[] rs = new CounterCell[2];
2607                          rs[h & 1] = new CounterCell(x);
2608                          counterCells = rs;
# Line 2501 | Line 2614 | public class ConcurrentHashMap<K,V> impl
2614                  if (init)
2615                      break;
2616              }
2617 <            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2617 >            else if (U.compareAndSetLong(this, BASECOUNT, v = baseCount, v + x))
2618                  break;                          // Fall back on using base
2619          }
2620      }
# Line 2513 | Line 2626 | public class ConcurrentHashMap<K,V> impl
2626       * too small, in which case resizes instead.
2627       */
2628      private final void treeifyBin(Node<K,V>[] tab, int index) {
2629 <        Node<K,V> b; int n, sc;
2629 >        Node<K,V> b; int n;
2630          if (tab != null) {
2631 <            if ((n = tab.length) < MIN_TREEIFY_CAPACITY) {
2632 <                if (tab == table && (sc = sizeCtl) >= 0 &&
2633 <                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
2521 <                    transfer(tab, null);
2522 <            }
2523 <            else if ((b = tabAt(tab, index)) != null) {
2631 >            if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
2632 >                tryPresize(n << 1);
2633 >            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
2634                  synchronized (b) {
2635                      if (tabAt(tab, index) == b) {
2636                          TreeNode<K,V> hd = null, tl = null;
# Line 2542 | Line 2652 | public class ConcurrentHashMap<K,V> impl
2652      }
2653  
2654      /**
2655 <     * Returns a list on non-TreeNodes replacing those in given list
2655 >     * Returns a list of non-TreeNodes replacing those in given list.
2656       */
2657      static <K,V> Node<K,V> untreeify(Node<K,V> b) {
2658          Node<K,V> hd = null, tl = null;
2659          for (Node<K,V> q = b; q != null; q = q.next) {
2660 <            Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val, null);
2660 >            Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val);
2661              if (tl == null)
2662                  hd = p;
2663              else
# Line 2560 | Line 2670 | public class ConcurrentHashMap<K,V> impl
2670      /* ---------------- TreeNodes -------------- */
2671  
2672      /**
2673 <     * Nodes for use in TreeBins
2673 >     * Nodes for use in TreeBins.
2674       */
2675      static final class TreeNode<K,V> extends Node<K,V> {
2676          TreeNode<K,V> parent;  // red-black tree links
# Line 2586 | Line 2696 | public class ConcurrentHashMap<K,V> impl
2696          final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
2697              if (k != null) {
2698                  TreeNode<K,V> p = this;
2699 <                do  {
2699 >                do {
2700                      int ph, dir; K pk; TreeNode<K,V> q;
2701                      TreeNode<K,V> pl = p.left, pr = p.right;
2702                      if ((ph = p.hash) > h)
# Line 2595 | Line 2705 | public class ConcurrentHashMap<K,V> impl
2705                          p = pr;
2706                      else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
2707                          return p;
2708 <                    else if (pl == null && pr == null)
2709 <                        break;
2708 >                    else if (pl == null)
2709 >                        p = pr;
2710 >                    else if (pr == null)
2711 >                        p = pl;
2712                      else if ((kc != null ||
2713                                (kc = comparableClassFor(k)) != null) &&
2714                               (dir = compareComparables(kc, k, pk)) != 0)
2715                          p = (dir < 0) ? pl : pr;
2716 <                    else if (pl == null)
2605 <                        p = pr;
2606 <                    else if (pr == null ||
2607 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2608 <                        p = pl;
2609 <                    else
2716 >                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
2717                          return q;
2718 +                    else
2719 +                        p = pl;
2720                  } while (p != null);
2721              }
2722              return null;
# Line 2634 | Line 2743 | public class ConcurrentHashMap<K,V> impl
2743          static final int READER = 4; // increment value for setting read lock
2744  
2745          /**
2746 +         * Tie-breaking utility for ordering insertions when equal
2747 +         * hashCodes and non-comparable. We don't require a total
2748 +         * order, just a consistent insertion rule to maintain
2749 +         * equivalence across rebalancings. Tie-breaking further than
2750 +         * necessary simplifies testing a bit.
2751 +         */
2752 +        static int tieBreakOrder(Object a, Object b) {
2753 +            int d;
2754 +            if (a == null || b == null ||
2755 +                (d = a.getClass().getName().
2756 +                 compareTo(b.getClass().getName())) == 0)
2757 +                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
2758 +                     -1 : 1);
2759 +            return d;
2760 +        }
2761 +
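
[Editorial sketch] A hypothetical illustration of the rule above: two key classes that collide on hashCode and implement no Comparable ordering still receive a deterministic placement, first by class name and then by identity hash code.

    final class TieBreakDemo {
        static final class KeyA { @Override public int hashCode() { return 42; } }
        static final class KeyB { @Override public int hashCode() { return 42; } }

        public static void main(String[] args) {
            Object a = new KeyA(), b = new KeyB();
            // same comparison tieBreakOrder applies when nothing else distinguishes the keys
            int d = a.getClass().getName().compareTo(b.getClass().getName());
            if (d == 0)
                d = System.identityHashCode(a) <= System.identityHashCode(b) ? -1 : 1;
            System.out.println(d < 0 ? "KeyA links left of KeyB" : "KeyA links right of KeyB");
        }
    }
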
2762 +        /**
2763           * Creates bin with initial set of nodes headed by b.
2764           */
2765          TreeBin(TreeNode<K,V> b) {
2766 <            super(TREEBIN, null, null, null);
2766 >            super(TREEBIN, null, null);
2767              this.first = b;
2768              TreeNode<K,V> r = null;
2769              for (TreeNode<K,V> x = b, next; x != null; x = next) {
# Line 2649 | Line 2775 | public class ConcurrentHashMap<K,V> impl
2775                      r = x;
2776                  }
2777                  else {
2778 <                    Object key = x.key;
2779 <                    int hash = x.hash;
2778 >                    K k = x.key;
2779 >                    int h = x.hash;
2780                      Class<?> kc = null;
2781                      for (TreeNode<K,V> p = r;;) {
2782                          int dir, ph;
2783 <                        if ((ph = p.hash) > hash)
2783 >                        K pk = p.key;
2784 >                        if ((ph = p.hash) > h)
2785                              dir = -1;
2786 <                        else if (ph < hash)
2786 >                        else if (ph < h)
2787                              dir = 1;
2788 <                        else if ((kc != null ||
2789 <                                  (kc = comparableClassFor(key)) != null))
2790 <                            dir = compareComparables(kc, key, p.key);
2791 <                        else
2665 <                            dir = 0;
2788 >                        else if ((kc == null &&
2789 >                                  (kc = comparableClassFor(k)) == null) ||
2790 >                                 (dir = compareComparables(kc, k, pk)) == 0)
2791 >                            dir = tieBreakOrder(k, pk);
2792                          TreeNode<K,V> xp = p;
2793                          if ((p = (dir <= 0) ? p.left : p.right) == null) {
2794                              x.parent = xp;
# Line 2677 | Line 2803 | public class ConcurrentHashMap<K,V> impl
2803                  }
2804              }
2805              this.root = r;
2806 +            assert checkInvariants(root);
2807          }
2808  
2809          /**
2810 <         * Acquires write lock for tree restructuring
2810 >         * Acquires write lock for tree restructuring.
2811           */
2812          private final void lockRoot() {
2813 <            if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
2813 >            if (!U.compareAndSetInt(this, LOCKSTATE, 0, WRITER))
2814                  contendedLock(); // offload to separate method
2815          }
2816  
2817          /**
2818 <         * Releases write lock for tree restructuring
2818 >         * Releases write lock for tree restructuring.
2819           */
2820          private final void unlockRoot() {
2821              lockState = 0;
2822          }
2823  
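
[Editorial sketch] The lockState word manipulated above encodes a tiny read-write lock: WRITER and WAITER occupy the two low bits (1 and 2 in the JDK sources; only READER = 4 is visible in this hunk), and each reader adds 4. A sketch of the fast paths, with AtomicInteger standing in for the Unsafe field access:

    import java.util.concurrent.atomic.AtomicInteger;

    final class TreeBinLockSketch {
        static final int WRITER = 1, WAITER = 2, READER = 4;
        final AtomicInteger lockState = new AtomicInteger();

        boolean tryWriteLock() {                 // mirrors lockRoot's fast path
            return lockState.compareAndSet(0, WRITER);
        }

        boolean tryReadLock() {                  // mirrors the CAS in find()
            int s = lockState.get();
            // readers only pile on while no writer holds or waits for the lock
            return (s & (WAITER | WRITER)) == 0
                && lockState.compareAndSet(s, s + READER);
        }

        void readUnlock() {
            lockState.addAndGet(-READER);        // the real code also unparks a parked writer
        }
    }
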
2824          /**
2825 <         * Possibly blocks awaiting root lock
2825 >         * Possibly blocks awaiting root lock.
2826           */
2827          private final void contendedLock() {
2828              boolean waiting = false;
2829              for (int s;;) {
2830 <                if (((s = lockState) & WRITER) == 0) {
2831 <                    if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
2830 >                if (((s = lockState) & ~WAITER) == 0) {
2831 >                    if (U.compareAndSetInt(this, LOCKSTATE, s, WRITER)) {
2832                          if (waiting)
2833                              waiter = null;
2834                          return;
2835                      }
2836                  }
2837 <                else if ((s | WAITER) == 0) {
2838 <                    if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
2837 >                else if ((s & WAITER) == 0) {
2838 >                    if (U.compareAndSetInt(this, LOCKSTATE, s, s | WAITER)) {
2839                          waiting = true;
2840                          waiter = Thread.currentThread();
2841                      }
# Line 2720 | Line 2847 | public class ConcurrentHashMap<K,V> impl
2847  
2848          /**
2849           * Returns matching node or null if none. Tries to search
2850 <         * using tree compareisons from root, but continues linear
2850 >         * using tree comparisons from root, but continues linear
2851           * search when lock not available.
2852           */
2853          final Node<K,V> find(int h, Object k) {
2854              if (k != null) {
2855 <                for (Node<K,V> e = first; e != null; e = e.next) {
2855 >                for (Node<K,V> e = first; e != null; ) {
2856                      int s; K ek;
2857                      if (((s = lockState) & (WAITER|WRITER)) != 0) {
2858                          if (e.hash == h &&
2859                              ((ek = e.key) == k || (ek != null && k.equals(ek))))
2860                              return e;
2861 +                        e = e.next;
2862                      }
2863 <                    else if (U.compareAndSwapInt(this, LOCKSTATE, s,
2863 >                    else if (U.compareAndSetInt(this, LOCKSTATE, s,
2864                                                   s + READER)) {
2865                          TreeNode<K,V> r, p;
2866                          try {
# Line 2757 | Line 2885 | public class ConcurrentHashMap<K,V> impl
2885           */
2886          final TreeNode<K,V> putTreeVal(int h, K k, V v) {
2887              Class<?> kc = null;
2888 +            boolean searched = false;
2889              for (TreeNode<K,V> p = root;;) {
2890 <                int dir, ph; K pk; TreeNode<K,V> q, pr;
2890 >                int dir, ph; K pk;
2891                  if (p == null) {
2892                      first = root = new TreeNode<K,V>(h, k, v, null, null);
2893                      break;
# Line 2772 | Line 2901 | public class ConcurrentHashMap<K,V> impl
2901                  else if ((kc == null &&
2902                            (kc = comparableClassFor(k)) == null) ||
2903                           (dir = compareComparables(kc, k, pk)) == 0) {
2904 <                    if (p.left == null)
2905 <                        dir = 1;
2906 <                    else if ((pr = p.right) == null ||
2907 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2908 <                        dir = -1;
2909 <                    else
2910 <                        return q;
2904 >                    if (!searched) {
2905 >                        TreeNode<K,V> q, ch;
2906 >                        searched = true;
2907 >                        if (((ch = p.left) != null &&
2908 >                             (q = ch.findTreeNode(h, k, kc)) != null) ||
2909 >                            ((ch = p.right) != null &&
2910 >                             (q = ch.findTreeNode(h, k, kc)) != null))
2911 >                            return q;
2912 >                    }
2913 >                    dir = tieBreakOrder(k, pk);
2914                  }
2915 +
2916                  TreeNode<K,V> xp = p;
2917 <                if ((p = (dir < 0) ? p.left : p.right) == null) {
2917 >                if ((p = (dir <= 0) ? p.left : p.right) == null) {
2918                      TreeNode<K,V> x, f = first;
2919                      first = x = new TreeNode<K,V>(h, k, v, f, xp);
2920                      if (f != null)
2921                          f.prev = x;
2922 <                    if (dir < 0)
2922 >                    if (dir <= 0)
2923                          xp.left = x;
2924                      else
2925                          xp.right = x;
# Line 2815 | Line 2948 | public class ConcurrentHashMap<K,V> impl
2948           * that are accessible independently of lock. So instead we
2949           * swap the tree linkages.
2950           *
2951 <         * @return true if now too small so should be untreeified.
2951 >         * @return true if now too small, so should be untreeified
2952           */
2953          final boolean removeTreeNode(TreeNode<K,V> p) {
2954              TreeNode<K,V> next = (TreeNode<K,V>)p.next;
# Line 3009 | Line 3142 | public class ConcurrentHashMap<K,V> impl
3142  
3143          static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root,
3144                                                     TreeNode<K,V> x) {
3145 <            for (TreeNode<K,V> xp, xpl, xpr;;)  {
3145 >            for (TreeNode<K,V> xp, xpl, xpr;;) {
3146                  if (x == null || x == root)
3147                      return root;
3148                  else if ((xp = x.parent) == null) {
# Line 3100 | Line 3233 | public class ConcurrentHashMap<K,V> impl
3233          }
3234  
3235          /**
3236 <         * Recursive invariant check
3236 >         * Checks invariants recursively for the tree of Nodes rooted at t.
3237           */
3238          static <K,V> boolean checkInvariants(TreeNode<K,V> t) {
3239              TreeNode<K,V> tp = t.parent, tl = t.left, tr = t.right,
# Line 3124 | Line 3257 | public class ConcurrentHashMap<K,V> impl
3257              return true;
3258          }
3259  
3260 <        private static final sun.misc.Unsafe U;
3260 >        private static final Unsafe U = Unsafe.getUnsafe();
3261          private static final long LOCKSTATE;
3262          static {
3263              try {
3131                U = sun.misc.Unsafe.getUnsafe();
3132                Class<?> k = TreeBin.class;
3264                  LOCKSTATE = U.objectFieldOffset
3265 <                    (k.getDeclaredField("lockState"));
3266 <            } catch (Exception e) {
3267 <                throw new Error(e);
3265 >                    (TreeBin.class.getDeclaredField("lockState"));
3266 >            } catch (ReflectiveOperationException e) {
3267 >                throw new ExceptionInInitializerError(e);
3268              }
3269          }
3270      }
# Line 3141 | Line 3272 | public class ConcurrentHashMap<K,V> impl
3272      /* ----------------Table Traversal -------------- */
3273  
3274      /**
3275 +     * Records the table, its length, and current traversal index for a
3276 +     * traverser that must process a region of a forwarded table before
3277 +     * proceeding with current table.
3278 +     */
3279 +    static final class TableStack<K,V> {
3280 +        int length;
3281 +        int index;
3282 +        Node<K,V>[] tab;
3283 +        TableStack<K,V> next;
3284 +    }
3285 +
3286 +    /**
3287       * Encapsulates traversal for methods such as containsValue; also
3288       * serves as a base class for other iterators and spliterators.
3289       *
# Line 3164 | Line 3307 | public class ConcurrentHashMap<K,V> impl
3307      static class Traverser<K,V> {
3308          Node<K,V>[] tab;        // current table; updated if resized
3309          Node<K,V> next;         // the next entry to use
3310 +        TableStack<K,V> stack, spare; // to save/restore on ForwardingNodes
3311          int index;              // index of bin to use next
3312          int baseIndex;          // current index of initial table
3313          int baseLimit;          // index bound for initial table
# Line 3185 | Line 3329 | public class ConcurrentHashMap<K,V> impl
3329              if ((e = next) != null)
3330                  e = e.next;
3331              for (;;) {
3332 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
3332 >                Node<K,V>[] t; int i, n;  // must use locals in checks
3333                  if (e != null)
3334                      return next = e;
3335                  if (baseIndex >= baseLimit || (t = tab) == null ||
3336                      (n = t.length) <= (i = index) || i < 0)
3337                      return next = null;
3338 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
3338 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
3339                      if (e instanceof ForwardingNode) {
3340                          tab = ((ForwardingNode<K,V>)e).nextTable;
3341                          e = null;
3342 +                        pushState(t, i, n);
3343                          continue;
3344                      }
3345                      else if (e instanceof TreeBin)
# Line 3202 | Line 3347 | public class ConcurrentHashMap<K,V> impl
3347                      else
3348                          e = null;
3349                  }
3350 <                if ((index += baseSize) >= n)
3351 <                    index = ++baseIndex;    // visit upper slots if present
3350 >                if (stack != null)
3351 >                    recoverState(n);
3352 >                else if ((index = i + baseSize) >= n)
3353 >                    index = ++baseIndex; // visit upper slots if present
3354              }
3355          }
3356 +
3357 +        /**
3358 +         * Saves traversal state upon encountering a forwarding node.
3359 +         */
3360 +        private void pushState(Node<K,V>[] t, int i, int n) {
3361 +            TableStack<K,V> s = spare;  // reuse if possible
3362 +            if (s != null)
3363 +                spare = s.next;
3364 +            else
3365 +                s = new TableStack<K,V>();
3366 +            s.tab = t;
3367 +            s.length = n;
3368 +            s.index = i;
3369 +            s.next = stack;
3370 +            stack = s;
3371 +        }
3372 +
3373 +        /**
3374 +         * Possibly pops traversal state.
3375 +         *
3376 +         * @param n length of current table
3377 +         */
3378 +        private void recoverState(int n) {
3379 +            TableStack<K,V> s; int len;
3380 +            while ((s = stack) != null && (index += (len = s.length)) >= n) {
3381 +                n = len;
3382 +                index = s.index;
3383 +                tab = s.tab;
3384 +                s.tab = null;
3385 +                TableStack<K,V> next = s.next;
3386 +                s.next = spare; // save for reuse
3387 +                stack = next;
3388 +                spare = s;
3389 +            }
3390 +            if (s == null && (index += baseSize) >= n)
3391 +                index = ++baseIndex;
3392 +        }
3393      }
3394  
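
[Editorial sketch] From user code, all of the save/restore machinery above surfaces only as weakly consistent iteration: an iterator created before a resize keeps working across it and never throws ConcurrentModificationException. A small, self-contained illustration:

    import java.util.Iterator;
    import java.util.concurrent.ConcurrentHashMap;

    final class WeaklyConsistentIterationDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<Integer,String> map = new ConcurrentHashMap<>(2);
            for (int i = 0; i < 4; i++) map.put(i, "v" + i);

            Iterator<Integer> it = map.keySet().iterator();
            // inserting while iterating forces resizes; the iterator keeps going
            // and may observe some, none, or all of the new entries
            for (int i = 4; i < 1_000; i++) map.put(i, "v" + i);
            while (it.hasNext()) it.next();      // no ConcurrentModificationException
        }
    }
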
3395      /**
3396       * Base of key, value, and entry Iterators. Adds fields to
3397 <     * Traverser to support iterator.remove
3397 >     * Traverser to support iterator.remove.
3398       */
3399      static class BaseIterator<K,V> extends Traverser<K,V> {
3400          final ConcurrentHashMap<K,V> map;
# Line 3236 | Line 3420 | public class ConcurrentHashMap<K,V> impl
3420  
3421      static final class KeyIterator<K,V> extends BaseIterator<K,V>
3422          implements Iterator<K>, Enumeration<K> {
3423 <        KeyIterator(Node<K,V>[] tab, int index, int size, int limit,
3423 >        KeyIterator(Node<K,V>[] tab, int size, int index, int limit,
3424                      ConcurrentHashMap<K,V> map) {
3425 <            super(tab, index, size, limit, map);
3425 >            super(tab, size, index, limit, map);
3426          }
3427  
3428          public final K next() {
# Line 3256 | Line 3440 | public class ConcurrentHashMap<K,V> impl
3440  
3441      static final class ValueIterator<K,V> extends BaseIterator<K,V>
3442          implements Iterator<V>, Enumeration<V> {
3443 <        ValueIterator(Node<K,V>[] tab, int index, int size, int limit,
3443 >        ValueIterator(Node<K,V>[] tab, int size, int index, int limit,
3444                        ConcurrentHashMap<K,V> map) {
3445 <            super(tab, index, size, limit, map);
3445 >            super(tab, size, index, limit, map);
3446          }
3447  
3448          public final V next() {
# Line 3276 | Line 3460 | public class ConcurrentHashMap<K,V> impl
3460  
3461      static final class EntryIterator<K,V> extends BaseIterator<K,V>
3462          implements Iterator<Map.Entry<K,V>> {
3463 <        EntryIterator(Node<K,V>[] tab, int index, int size, int limit,
3463 >        EntryIterator(Node<K,V>[] tab, int size, int index, int limit,
3464                        ConcurrentHashMap<K,V> map) {
3465 <            super(tab, index, size, limit, map);
3465 >            super(tab, size, index, limit, map);
3466          }
3467  
3468          public final Map.Entry<K,V> next() {
# Line 3294 | Line 3478 | public class ConcurrentHashMap<K,V> impl
3478      }
3479  
3480      /**
3481 <     * Exported Entry for EntryIterator
3481 >     * Exported Entry for EntryIterator.
3482       */
3483      static final class MapEntry<K,V> implements Map.Entry<K,V> {
3484          final K key; // non-null
# Line 3308 | Line 3492 | public class ConcurrentHashMap<K,V> impl
3492          public K getKey()        { return key; }
3493          public V getValue()      { return val; }
3494          public int hashCode()    { return key.hashCode() ^ val.hashCode(); }
3495 <        public String toString() { return key + "=" + val; }
3495 >        public String toString() {
3496 >            return Helpers.mapEntryToString(key, val);
3497 >        }
3498  
3499          public boolean equals(Object o) {
3500              Object k, v; Map.Entry<?,?> e;
# Line 3345 | Line 3531 | public class ConcurrentHashMap<K,V> impl
3531              this.est = est;
3532          }
3533  
3534 <        public Spliterator<K> trySplit() {
3534 >        public KeySpliterator<K,V> trySplit() {
3535              int i, f, h;
3536              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3537                  new KeySpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3384 | Line 3570 | public class ConcurrentHashMap<K,V> impl
3570              this.est = est;
3571          }
3572  
3573 <        public Spliterator<V> trySplit() {
3573 >        public ValueSpliterator<K,V> trySplit() {
3574              int i, f, h;
3575              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3576                  new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3424 | Line 3610 | public class ConcurrentHashMap<K,V> impl
3610              this.est = est;
3611          }
3612  
3613 <        public Spliterator<Map.Entry<K,V>> trySplit() {
3613 >        public EntrySpliterator<K,V> trySplit() {
3614              int i, f, h;
3615              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3616                  new EntrySpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3498 | Line 3684 | public class ConcurrentHashMap<K,V> impl
3684       * for an element, or null if there is no transformation (in
3685       * which case the action is not applied)
3686       * @param action the action
3687 +     * @param <U> the return type of the transformer
3688       * @since 1.8
3689       */
3690      public <U> void forEach(long parallelismThreshold,
# Line 3521 | Line 3708 | public class ConcurrentHashMap<K,V> impl
3708       * needed for this operation to be executed in parallel
3709       * @param searchFunction a function returning a non-null
3710       * result on success, else null
3711 +     * @param <U> the return type of the search function
3712       * @return a non-null result from applying the given search
3713       * function on each (key, value), or null if none
3714       * @since 1.8
# Line 3544 | Line 3732 | public class ConcurrentHashMap<K,V> impl
3732       * for an element, or null if there is no transformation (in
3733       * which case it is not combined)
3734       * @param reducer a commutative associative combining function
3735 +     * @param <U> the return type of the transformer
3736       * @return the result of accumulating the given transformation
3737       * of all (key, value) pairs
3738       * @since 1.8
# Line 3573 | Line 3762 | public class ConcurrentHashMap<K,V> impl
3762       * of all (key, value) pairs
3763       * @since 1.8
3764       */
3765 <    public double reduceToDoubleIn(long parallelismThreshold,
3766 <                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3767 <                                   double basis,
3768 <                                   DoubleBinaryOperator reducer) {
3765 >    public double reduceToDouble(long parallelismThreshold,
3766 >                                 ToDoubleBiFunction<? super K, ? super V> transformer,
3767 >                                 double basis,
3768 >                                 DoubleBinaryOperator reducer) {
3769          if (transformer == null || reducer == null)
3770              throw new NullPointerException();
3771          return new MapReduceMappingsToDoubleTask<K,V>
# Line 3662 | Line 3851 | public class ConcurrentHashMap<K,V> impl
3851       * for an element, or null if there is no transformation (in
3852       * which case the action is not applied)
3853       * @param action the action
3854 +     * @param <U> the return type of the transformer
3855       * @since 1.8
3856       */
3857      public <U> void forEachKey(long parallelismThreshold,
# Line 3685 | Line 3875 | public class ConcurrentHashMap<K,V> impl
3875       * needed for this operation to be executed in parallel
3876       * @param searchFunction a function returning a non-null
3877       * result on success, else null
3878 +     * @param <U> the return type of the search function
3879       * @return a non-null result from applying the given search
3880       * function on each key, or null if none
3881       * @since 1.8
# Line 3727 | Line 3918 | public class ConcurrentHashMap<K,V> impl
3918       * for an element, or null if there is no transformation (in
3919       * which case it is not combined)
3920       * @param reducer a commutative associative combining function
3921 +     * @param <U> the return type of the transformer
3922       * @return the result of accumulating the given transformation
3923       * of all keys
3924       * @since 1.8
# Line 3846 | Line 4038 | public class ConcurrentHashMap<K,V> impl
4038       * for an element, or null if there is no transformation (in
4039       * which case the action is not applied)
4040       * @param action the action
4041 +     * @param <U> the return type of the transformer
4042       * @since 1.8
4043       */
4044      public <U> void forEachValue(long parallelismThreshold,
# Line 3869 | Line 4062 | public class ConcurrentHashMap<K,V> impl
4062       * needed for this operation to be executed in parallel
4063       * @param searchFunction a function returning a non-null
4064       * result on success, else null
4065 +     * @param <U> the return type of the search function
4066       * @return a non-null result from applying the given search
4067       * function on each value, or null if none
4068       * @since 1.8
# Line 3910 | Line 4104 | public class ConcurrentHashMap<K,V> impl
4104       * for an element, or null if there is no transformation (in
4105       * which case it is not combined)
4106       * @param reducer a commutative associative combining function
4107 +     * @param <U> the return type of the transformer
4108       * @return the result of accumulating the given transformation
4109       * of all values
4110       * @since 1.8
# Line 4027 | Line 4222 | public class ConcurrentHashMap<K,V> impl
4222       * for an element, or null if there is no transformation (in
4223       * which case the action is not applied)
4224       * @param action the action
4225 +     * @param <U> the return type of the transformer
4226       * @since 1.8
4227       */
4228      public <U> void forEachEntry(long parallelismThreshold,
# Line 4050 | Line 4246 | public class ConcurrentHashMap<K,V> impl
4246       * needed for this operation to be executed in parallel
4247       * @param searchFunction a function returning a non-null
4248       * result on success, else null
4249 +     * @param <U> the return type of the search function
4250       * @return a non-null result from applying the given search
4251       * function on each entry, or null if none
4252       * @since 1.8
# Line 4091 | Line 4288 | public class ConcurrentHashMap<K,V> impl
4288       * for an element, or null if there is no transformation (in
4289       * which case it is not combined)
4290       * @param reducer a commutative associative combining function
4291 +     * @param <U> the return type of the transformer
4292       * @return the result of accumulating the given transformation
4293       * of all entries
4294       * @since 1.8
# Line 4213 | Line 4411 | public class ConcurrentHashMap<K,V> impl
4411          // implementations below rely on concrete classes supplying these
4412          // abstract methods
4413          /**
4414 <         * Returns a "weakly consistent" iterator that will never
4415 <         * throw {@link ConcurrentModificationException}, and
4416 <         * guarantees to traverse elements as they existed upon
4417 <         * construction of the iterator, and may (but is not
4418 <         * guaranteed to) reflect any modifications subsequent to
4419 <         * construction.
4414 >         * Returns an iterator over the elements in this collection.
4415 >         *
4416 >         * <p>The returned iterator is
4417 >         * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
4418 >         *
4419 >         * @return an iterator over the elements in this collection
4420           */
4421          public abstract Iterator<E> iterator();
4422          public abstract boolean contains(Object o);
4423          public abstract boolean remove(Object o);
4424  
4425 <        private static final String oomeMsg = "Required array size too large";
4425 >        private static final String OOME_MSG = "Required array size too large";
4426  
4427          public final Object[] toArray() {
4428              long sz = map.mappingCount();
4429              if (sz > MAX_ARRAY_SIZE)
4430 <                throw new OutOfMemoryError(oomeMsg);
4430 >                throw new OutOfMemoryError(OOME_MSG);
4431              int n = (int)sz;
4432              Object[] r = new Object[n];
4433              int i = 0;
4434              for (E e : this) {
4435                  if (i == n) {
4436                      if (n >= MAX_ARRAY_SIZE)
4437 <                        throw new OutOfMemoryError(oomeMsg);
4437 >                        throw new OutOfMemoryError(OOME_MSG);
4438                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4439                          n = MAX_ARRAY_SIZE;
4440                      else
# Line 4252 | Line 4450 | public class ConcurrentHashMap<K,V> impl
4450          public final <T> T[] toArray(T[] a) {
4451              long sz = map.mappingCount();
4452              if (sz > MAX_ARRAY_SIZE)
4453 <                throw new OutOfMemoryError(oomeMsg);
4453 >                throw new OutOfMemoryError(OOME_MSG);
4454              int m = (int)sz;
4455              T[] r = (a.length >= m) ? a :
4456                  (T[])java.lang.reflect.Array
# Line 4262 | Line 4460 | public class ConcurrentHashMap<K,V> impl
4460              for (E e : this) {
4461                  if (i == n) {
4462                      if (n >= MAX_ARRAY_SIZE)
4463 <                        throw new OutOfMemoryError(oomeMsg);
4463 >                        throw new OutOfMemoryError(OOME_MSG);
4464                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4465                          n = MAX_ARRAY_SIZE;
4466                      else
# Line 4315 | Line 4513 | public class ConcurrentHashMap<K,V> impl
4513              return true;
4514          }
4515  
4516 <        public final boolean removeAll(Collection<?> c) {
4516 >        public boolean removeAll(Collection<?> c) {
4517 >            if (c == null) throw new NullPointerException();
4518              boolean modified = false;
4519 <            for (Iterator<E> it = iterator(); it.hasNext();) {
4520 <                if (c.contains(it.next())) {
4521 <                    it.remove();
4522 <                    modified = true;
4519 >            // Use (c instanceof Set) as a hint that lookup in c is as
4520 >            // efficient as this view
4521 >            Node<K,V>[] t;
4522 >            if ((t = map.table) == null) {
4523 >                return false;
4524 >            } else if (c instanceof Set<?> && c.size() > t.length) {
4525 >                for (Iterator<?> it = iterator(); it.hasNext(); ) {
4526 >                    if (c.contains(it.next())) {
4527 >                        it.remove();
4528 >                        modified = true;
4529 >                    }
4530                  }
4531 +            } else {
4532 +                for (Object e : c)
4533 +                    modified |= remove(e);
4534              }
4535              return modified;
4536          }
4537  
4538          public final boolean retainAll(Collection<?> c) {
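
[Editorial sketch] A small usage sketch of the view removeAll above: removing a batch of keys through the key-set view mutates the backing map directly (keys and values here are invented).

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    final class ViewRemoveAllDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("keep", 1);
            map.put("drop1", 2);
            map.put("drop2", 3);

            map.keySet().removeAll(Set.of("drop1", "drop2"));
            System.out.println(map);            // {keep=1}
        }
    }
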
4539 +            if (c == null) throw new NullPointerException();
4540              boolean modified = false;
4541              for (Iterator<E> it = iterator(); it.hasNext();) {
4542                  if (!c.contains(it.next())) {
# Line 4507 | Line 4717 | public class ConcurrentHashMap<K,V> impl
4717              throw new UnsupportedOperationException();
4718          }
4719  
4720 +        @Override public boolean removeAll(Collection<?> c) {
4721 +            if (c == null) throw new NullPointerException();
4722 +            boolean modified = false;
4723 +            for (Iterator<V> it = iterator(); it.hasNext();) {
4724 +                if (c.contains(it.next())) {
4725 +                    it.remove();
4726 +                    modified = true;
4727 +                }
4728 +            }
4729 +            return modified;
4730 +        }
4731 +
4732 +        public boolean removeIf(Predicate<? super V> filter) {
4733 +            return map.removeValueIf(filter);
4734 +        }
4735 +
4736          public Spliterator<V> spliterator() {
4737              Node<K,V>[] t;
4738              ConcurrentHashMap<K,V> m = map;
# Line 4576 | Line 4802 | public class ConcurrentHashMap<K,V> impl
4802              return added;
4803          }
4804  
4805 +        public boolean removeIf(Predicate<? super Entry<K,V>> filter) {
4806 +            return map.removeEntryIf(filter);
4807 +        }
4808 +
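
[Editorial sketch] Both removeIf overrides added in these hunks (values view above, entry-set view here) delegate to the map's internal removeValueIf and removeEntryIf helpers. A hypothetical usage with invented keys and values:

    import java.util.concurrent.ConcurrentHashMap;

    final class ViewRemoveIfDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            map.put("c", 3);

            map.values().removeIf(v -> v % 2 == 0);                  // drops b=2
            map.entrySet().removeIf(e -> e.getKey().equals("c"));    // drops c=3
            System.out.println(map);                                 // {a=1}
        }
    }
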
4809          public final int hashCode() {
4810              int h = 0;
4811              Node<K,V>[] t;
# Line 4621 | Line 4851 | public class ConcurrentHashMap<K,V> impl
4851       * Base class for bulk tasks. Repeats some fields and code from
4852       * class Traverser, because we need to subclass CountedCompleter.
4853       */
4854 +    @SuppressWarnings("serial")
4855      abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
4856          Node<K,V>[] tab;        // same as Traverser
4857          Node<K,V> next;
4858 +        TableStack<K,V> stack, spare;
4859          int index;
4860          int baseIndex;
4861          int baseLimit;
# Line 4645 | Line 4877 | public class ConcurrentHashMap<K,V> impl
4877          }
4878  
4879          /**
4880 <         * Same as Traverser version
4880 >         * Same as Traverser version.
4881           */
4882          final Node<K,V> advance() {
4883              Node<K,V> e;
4884              if ((e = next) != null)
4885                  e = e.next;
4886              for (;;) {
4887 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
4887 >                Node<K,V>[] t; int i, n;
4888                  if (e != null)
4889                      return next = e;
4890                  if (baseIndex >= baseLimit || (t = tab) == null ||
4891                      (n = t.length) <= (i = index) || i < 0)
4892                      return next = null;
4893 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4893 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
4894                      if (e instanceof ForwardingNode) {
4895                          tab = ((ForwardingNode<K,V>)e).nextTable;
4896                          e = null;
4897 +                        pushState(t, i, n);
4898                          continue;
4899                      }
4900                      else if (e instanceof TreeBin)
# Line 4669 | Line 4902 | public class ConcurrentHashMap<K,V> impl
4902                      else
4903                          e = null;
4904                  }
4905 <                if ((index += baseSize) >= n)
4906 <                    index = ++baseIndex;    // visit upper slots if present
4905 >                if (stack != null)
4906 >                    recoverState(n);
4907 >                else if ((index = i + baseSize) >= n)
4908 >                    index = ++baseIndex;
4909 >            }
4910 >        }
4911 >
4912 >        private void pushState(Node<K,V>[] t, int i, int n) {
4913 >            TableStack<K,V> s = spare;
4914 >            if (s != null)
4915 >                spare = s.next;
4916 >            else
4917 >                s = new TableStack<K,V>();
4918 >            s.tab = t;
4919 >            s.length = n;
4920 >            s.index = i;
4921 >            s.next = stack;
4922 >            stack = s;
4923 >        }
4924 >
4925 >        private void recoverState(int n) {
4926 >            TableStack<K,V> s; int len;
4927 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
4928 >                n = len;
4929 >                index = s.index;
4930 >                tab = s.tab;
4931 >                s.tab = null;
4932 >                TableStack<K,V> next = s.next;
4933 >                s.next = spare; // save for reuse
4934 >                stack = next;
4935 >                spare = s;
4936              }
4937 +            if (s == null && (index += baseSize) >= n)
4938 +                index = ++baseIndex;
4939          }
4940      }
4941  
# Line 5131 | Line 5395 | public class ConcurrentHashMap<K,V> impl
5395                  result = r;
5396                  CountedCompleter<?> c;
5397                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5398 <                    @SuppressWarnings("unchecked") ReduceKeysTask<K,V>
5398 >                    @SuppressWarnings("unchecked")
5399 >                    ReduceKeysTask<K,V>
5400                          t = (ReduceKeysTask<K,V>)c,
5401                          s = t.rights;
5402                      while (s != null) {
# Line 5178 | Line 5443 | public class ConcurrentHashMap<K,V> impl
5443                  result = r;
5444                  CountedCompleter<?> c;
5445                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5446 <                    @SuppressWarnings("unchecked") ReduceValuesTask<K,V>
5446 >                    @SuppressWarnings("unchecked")
5447 >                    ReduceValuesTask<K,V>
5448                          t = (ReduceValuesTask<K,V>)c,
5449                          s = t.rights;
5450                      while (s != null) {
# Line 5223 | Line 5489 | public class ConcurrentHashMap<K,V> impl
5489                  result = r;
5490                  CountedCompleter<?> c;
5491                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5492 <                    @SuppressWarnings("unchecked") ReduceEntriesTask<K,V>
5492 >                    @SuppressWarnings("unchecked")
5493 >                    ReduceEntriesTask<K,V>
5494                          t = (ReduceEntriesTask<K,V>)c,
5495                          s = t.rights;
5496                      while (s != null) {
# Line 5276 | Line 5543 | public class ConcurrentHashMap<K,V> impl
5543                  result = r;
5544                  CountedCompleter<?> c;
5545                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5546 <                    @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U>
5546 >                    @SuppressWarnings("unchecked")
5547 >                    MapReduceKeysTask<K,V,U>
5548                          t = (MapReduceKeysTask<K,V,U>)c,
5549                          s = t.rights;
5550                      while (s != null) {
# Line 5329 | Line 5597 | public class ConcurrentHashMap<K,V> impl
5597                  result = r;
5598                  CountedCompleter<?> c;
5599                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5600 <                    @SuppressWarnings("unchecked") MapReduceValuesTask<K,V,U>
5600 >                    @SuppressWarnings("unchecked")
5601 >                    MapReduceValuesTask<K,V,U>
5602                          t = (MapReduceValuesTask<K,V,U>)c,
5603                          s = t.rights;
5604                      while (s != null) {
# Line 5382 | Line 5651 | public class ConcurrentHashMap<K,V> impl
5651                  result = r;
5652                  CountedCompleter<?> c;
5653                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5654 <                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
5654 >                    @SuppressWarnings("unchecked")
5655 >                    MapReduceEntriesTask<K,V,U>
5656                          t = (MapReduceEntriesTask<K,V,U>)c,
5657                          s = t.rights;
5658                      while (s != null) {
# Line 5435 | Line 5705 | public class ConcurrentHashMap<K,V> impl
5705                  result = r;
5706                  CountedCompleter<?> c;
5707                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5708 <                    @SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
5708 >                    @SuppressWarnings("unchecked")
5709 >                    MapReduceMappingsTask<K,V,U>
5710                          t = (MapReduceMappingsTask<K,V,U>)c,
5711                          s = t.rights;
5712                      while (s != null) {
# Line 5487 | Line 5758 | public class ConcurrentHashMap<K,V> impl
5758                  result = r;
5759                  CountedCompleter<?> c;
5760                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5761 <                    @SuppressWarnings("unchecked") MapReduceKeysToDoubleTask<K,V>
5761 >                    @SuppressWarnings("unchecked")
5762 >                    MapReduceKeysToDoubleTask<K,V>
5763                          t = (MapReduceKeysToDoubleTask<K,V>)c,
5764                          s = t.rights;
5765                      while (s != null) {
# Line 5536 | Line 5808 | public class ConcurrentHashMap<K,V> impl
5808                  result = r;
5809                  CountedCompleter<?> c;
5810                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5811 <                    @SuppressWarnings("unchecked") MapReduceValuesToDoubleTask<K,V>
5811 >                    @SuppressWarnings("unchecked")
5812 >                    MapReduceValuesToDoubleTask<K,V>
5813                          t = (MapReduceValuesToDoubleTask<K,V>)c,
5814                          s = t.rights;
5815                      while (s != null) {
# Line 5585 | Line 5858 | public class ConcurrentHashMap<K,V> impl
5858                  result = r;
5859                  CountedCompleter<?> c;
5860                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5861 <                    @SuppressWarnings("unchecked") MapReduceEntriesToDoubleTask<K,V>
5861 >                    @SuppressWarnings("unchecked")
5862 >                    MapReduceEntriesToDoubleTask<K,V>
5863                          t = (MapReduceEntriesToDoubleTask<K,V>)c,
5864                          s = t.rights;
5865                      while (s != null) {
# Line 5634 | Line 5908 | public class ConcurrentHashMap<K,V> impl
5908                  result = r;
5909                  CountedCompleter<?> c;
5910                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5911 <                    @SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
5911 >                    @SuppressWarnings("unchecked")
5912 >                    MapReduceMappingsToDoubleTask<K,V>
5913                          t = (MapReduceMappingsToDoubleTask<K,V>)c,
5914                          s = t.rights;
5915                      while (s != null) {
# Line 5683 | Line 5958 | public class ConcurrentHashMap<K,V> impl
5958                  result = r;
5959                  CountedCompleter<?> c;
5960                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5961 <                    @SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
5961 >                    @SuppressWarnings("unchecked")
5962 >                    MapReduceKeysToLongTask<K,V>
5963                          t = (MapReduceKeysToLongTask<K,V>)c,
5964                          s = t.rights;
5965                      while (s != null) {
# Line 5732 | Line 6008 | public class ConcurrentHashMap<K,V> impl
6008                  result = r;
6009                  CountedCompleter<?> c;
6010                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6011 <                    @SuppressWarnings("unchecked") MapReduceValuesToLongTask<K,V>
6011 >                    @SuppressWarnings("unchecked")
6012 >                    MapReduceValuesToLongTask<K,V>
6013                          t = (MapReduceValuesToLongTask<K,V>)c,
6014                          s = t.rights;
6015                      while (s != null) {
# Line 5781 | Line 6058 | public class ConcurrentHashMap<K,V> impl
6058                  result = r;
6059                  CountedCompleter<?> c;
6060                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6061 <                    @SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
6061 >                    @SuppressWarnings("unchecked")
6062 >                    MapReduceEntriesToLongTask<K,V>
6063                          t = (MapReduceEntriesToLongTask<K,V>)c,
6064                          s = t.rights;
6065                      while (s != null) {
# Line 5830 | Line 6108 | public class ConcurrentHashMap<K,V> impl
6108                  result = r;
6109                  CountedCompleter<?> c;
6110                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6111 <                    @SuppressWarnings("unchecked") MapReduceMappingsToLongTask<K,V>
6111 >                    @SuppressWarnings("unchecked")
6112 >                    MapReduceMappingsToLongTask<K,V>
6113                          t = (MapReduceMappingsToLongTask<K,V>)c,
6114                          s = t.rights;
6115                      while (s != null) {
# Line 5879 | Line 6158 | public class ConcurrentHashMap<K,V> impl
6158                  result = r;
6159                  CountedCompleter<?> c;
6160                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6161 <                    @SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
6161 >                    @SuppressWarnings("unchecked")
6162 >                    MapReduceKeysToIntTask<K,V>
6163                          t = (MapReduceKeysToIntTask<K,V>)c,
6164                          s = t.rights;
6165                      while (s != null) {
# Line 5928 | Line 6208 | public class ConcurrentHashMap<K,V> impl
6208                  result = r;
6209                  CountedCompleter<?> c;
6210                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6211 <                    @SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
6211 >                    @SuppressWarnings("unchecked")
6212 >                    MapReduceValuesToIntTask<K,V>
6213                          t = (MapReduceValuesToIntTask<K,V>)c,
6214                          s = t.rights;
6215                      while (s != null) {
# Line 5977 | Line 6258 | public class ConcurrentHashMap<K,V> impl
6258                  result = r;
6259                  CountedCompleter<?> c;
6260                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6261 <                    @SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
6261 >                    @SuppressWarnings("unchecked")
6262 >                    MapReduceEntriesToIntTask<K,V>
6263                          t = (MapReduceEntriesToIntTask<K,V>)c,
6264                          s = t.rights;
6265                      while (s != null) {
# Line 6026 | Line 6308 | public class ConcurrentHashMap<K,V> impl
6308                  result = r;
6309                  CountedCompleter<?> c;
6310                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6311 <                    @SuppressWarnings("unchecked") MapReduceMappingsToIntTask<K,V>
6311 >                    @SuppressWarnings("unchecked")
6312 >                    MapReduceMappingsToIntTask<K,V>
6313                          t = (MapReduceMappingsToIntTask<K,V>)c,
6314                          s = t.rights;
6315                      while (s != null) {
# Line 6039 | Line 6322 | public class ConcurrentHashMap<K,V> impl
6322      }
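All of the compute() tails shown above end with the same CountedCompleter completion idiom: each task forks a chain of right-hand subtasks, and whichever participant finds a zero pending count walks firstComplete()/nextComplete() upward, folding the chained siblings' results into each completing ancestor. The following is a minimal, self-contained sketch of that idiom applied to a plain array sum; ArraySumTask, its fields, and the fixed THRESHOLD are invented for illustration and are not part of ConcurrentHashMap, whose tasks split by table bins using a batch heuristic instead.

    import java.util.concurrent.CountedCompleter;
    import java.util.concurrent.ForkJoinPool;

    // Illustrative sketch only: not part of ConcurrentHashMap.
    class ArraySumTask extends CountedCompleter<Long> {
        static final int THRESHOLD = 64;   // leaf size; the real tasks use a batch heuristic
        final long[] a;
        final int lo, hi;
        long result;
        ArraySumTask rights, nextRight;    // chained right-hand subtasks, as in the tasks above

        ArraySumTask(ArraySumTask parent, ArraySumTask nextRight,
                     long[] a, int lo, int hi) {
            super(parent);
            this.nextRight = nextRight;
            this.a = a; this.lo = lo; this.hi = hi;
        }

        public void compute() {
            int l = lo, h = hi;
            while (h - l > THRESHOLD) {    // split off right halves, keep working on the left
                int mid = (l + h) >>> 1;
                addToPendingCount(1);
                (rights = new ArraySumTask(this, rights, a, mid, h)).fork();
                h = mid;
            }
            long sum = 0L;                 // leaf work on the remaining slice
            for (int i = l; i < h; ++i)
                sum += a[i];
            result = sum;
            // Same completion walk as the MapReduce*Task classes above: fold each
            // completed sibling chain into its parent's result.
            for (CountedCompleter<?> c = firstComplete(); c != null; c = c.nextComplete()) {
                ArraySumTask t = (ArraySumTask)c, s = t.rights;
                while (s != null) {
                    t.result += s.result;
                    s = t.rights = s.nextRight;
                }
            }
        }

        public Long getRawResult() { return result; }

        public static void main(String[] args) {
            long[] data = new long[1000];
            for (int i = 0; i < data.length; ++i) data[i] = i;
            System.out.println(ForkJoinPool.commonPool()
                .invoke(new ArraySumTask(null, null, data, 0, data.length)));  // 499500
        }
    }

The pattern lets every subtask reuse the calling thread's loop for its leftmost split and defers all merging to the completion walk, which is why the tasks above carry a rights/nextRight chain rather than joining children directly.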
6323  
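The primitive-specialized task classes changed above (MapReduce*ToDoubleTask, *ToLongTask, *ToIntTask) back ConcurrentHashMap's public bulk reduction methods such as reduceKeysToInt, reduceValuesToLong and reduceEntriesToDouble. A brief usage sketch of that public API follows; the word-count map and its contents are made up for the example.

    import java.util.concurrent.ConcurrentHashMap;

    // Illustrative usage of the bulk reduction API implemented by the tasks above.
    public class BulkReduceDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
            counts.put("alpha", 3);
            counts.put("beta", 5);
            counts.put("gamma", 2);

            // Sum all values; parallelismThreshold = 1 requests maximal parallelism.
            long total = counts.reduceValuesToLong(1L, Integer::longValue, 0L, Long::sum);

            // Longest key length, reduced over keys with an int basis.
            int longestKey = counts.reduceKeysToInt(1L, String::length, 0, Math::max);

            // Transform-and-reduce over key/value pairs into a single String
            // (a null transformer result would be skipped; none occur here).
            String summary = counts.reduce(1L,
                (k, v) -> k + "=" + v,
                (x, y) -> x + ", " + y);

            System.out.println(total);       // 10
            System.out.println(longestKey);  // 5
            System.out.println(summary);     // e.g. "alpha=3, beta=5, gamma=2" (order unspecified)
        }
    }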
6324      // Unsafe mechanics
6325 <    private static final sun.misc.Unsafe U;
6325 >    private static final Unsafe U = Unsafe.getUnsafe();
6326      private static final long SIZECTL;
6327      private static final long TRANSFERINDEX;
6045    private static final long TRANSFERORIGIN;
6328      private static final long BASECOUNT;
6329      private static final long CELLSBUSY;
6330      private static final long CELLVALUE;
6331 <    private static final long ABASE;
6331 >    private static final int ABASE;
6332      private static final int ASHIFT;
6333  
6334      static {
6335          try {
6054            U = sun.misc.Unsafe.getUnsafe();
6055            Class<?> k = ConcurrentHashMap.class;
6336              SIZECTL = U.objectFieldOffset
6337 <                (k.getDeclaredField("sizeCtl"));
6337 >                (ConcurrentHashMap.class.getDeclaredField("sizeCtl"));
6338              TRANSFERINDEX = U.objectFieldOffset
6339 <                (k.getDeclaredField("transferIndex"));
6060 <            TRANSFERORIGIN = U.objectFieldOffset
6061 <                (k.getDeclaredField("transferOrigin"));
6339 >                (ConcurrentHashMap.class.getDeclaredField("transferIndex"));
6340              BASECOUNT = U.objectFieldOffset
6341 <                (k.getDeclaredField("baseCount"));
6341 >                (ConcurrentHashMap.class.getDeclaredField("baseCount"));
6342              CELLSBUSY = U.objectFieldOffset
6343 <                (k.getDeclaredField("cellsBusy"));
6344 <            Class<?> ck = CounterCell.class;
6343 >                (ConcurrentHashMap.class.getDeclaredField("cellsBusy"));
6344 >
6345              CELLVALUE = U.objectFieldOffset
6346 <                (ck.getDeclaredField("value"));
6347 <            Class<?> ak = Node[].class;
6348 <            ABASE = U.arrayBaseOffset(ak);
6349 <            int scale = U.arrayIndexScale(ak);
6346 >                (CounterCell.class.getDeclaredField("value"));
6347 >
6348 >            ABASE = U.arrayBaseOffset(Node[].class);
6349 >            int scale = U.arrayIndexScale(Node[].class);
6350              if ((scale & (scale - 1)) != 0)
6351 <                throw new Error("data type scale not a power of two");
6351 >                throw new ExceptionInInitializerError("array index scale not a power of two");
6352              ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6353 <        } catch (Exception e) {
6354 <            throw new Error(e);
6353 >        } catch (ReflectiveOperationException e) {
6354 >            throw new ExceptionInInitializerError(e);
6355          }
6356 +
6357 +        // Reduce the risk of rare disastrous classloading in first call to
6358 +        // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773
6359 +        Class<?> ensureLoaded = LockSupport.class;
6360 +
6361 +        // Eager class load observed to help JIT during startup
6362 +        ensureLoaded = ReservationNode.class;
6363      }
6364   }
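The static block above wires the class to jdk.internal.misc.Unsafe, which is not accessible to application code. As a hedged sketch of the public-API equivalent of the same two mechanisms: the field offsets (SIZECTL, BASECOUNT, ...) correspond to instance-field VarHandles, and the ABASE/ASHIFT element addressing (offset = ABASE + ((long)i << ASHIFT)) corresponds to an array-element VarHandle. The class and member names below (VarHandleSketch, baseCount, tabAt, casTabAt) are invented for illustration.

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;

    // Illustrative only: the java.lang.invoke analogue of the field-offset and
    // array-element-offset machinery set up in the static block above.
    class VarHandleSketch {
        private volatile long baseCount;                     // stand-in field

        private static final VarHandle BASECOUNT;            // like the BASECOUNT offset
        private static final VarHandle TAB =                 // like ABASE/ASHIFT addressing
            MethodHandles.arrayElementVarHandle(Object[].class);

        static {
            try {
                BASECOUNT = MethodHandles.lookup()
                    .findVarHandle(VarHandleSketch.class, "baseCount", long.class);
            } catch (ReflectiveOperationException e) {
                throw new ExceptionInInitializerError(e);
            }
        }

        boolean casBaseCount(long expect, long update) {     // cf. the CAS on baseCount via its offset
            return BASECOUNT.compareAndSet(this, expect, update);
        }

        static Object tabAt(Object[] tab, int i) {           // cf. acquire-read of tab[i] via its offset
            return TAB.getAcquire(tab, i);
        }

        static boolean casTabAt(Object[] tab, int i, Object expect, Object update) {
            return TAB.compareAndSet(tab, i, expect, update);
        }

        public static void main(String[] args) {
            VarHandleSketch s = new VarHandleSketch();
            System.out.println(s.casBaseCount(0L, 42L));        // true
            System.out.println(s.baseCount);                    // 42
            Object[] tab = new Object[16];
            System.out.println(casTabAt(tab, 3, null, "node")); // true
            System.out.println(tabAt(tab, 3));                  // node
        }
    }

ConcurrentHashMap itself stays on raw Unsafe offsets, in part to avoid VarHandle bootstrap dependencies during early initialization of java.base classes.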

Diff Legend

(no marker)  Removed lines
+            Added lines
<            Changed lines (old revision)
>            Changed lines (new revision)