
Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.228 by jsr166, Tue Jun 18 18:39:14 2013 UTC vs.
Revision 1.307 by jsr166, Sun Mar 11 18:00:05 2018 UTC

# Line 10 | Line 10 | import java.io.ObjectStreamField;
10   import java.io.Serializable;
11   import java.lang.reflect.ParameterizedType;
12   import java.lang.reflect.Type;
13 + import java.util.AbstractMap;
14   import java.util.Arrays;
15   import java.util.Collection;
15 import java.util.Comparator;
16 import java.util.ConcurrentModificationException;
16   import java.util.Enumeration;
17   import java.util.HashMap;
18   import java.util.Hashtable;
# Line 22 | Line 21 | import java.util.Map;
21   import java.util.NoSuchElementException;
22   import java.util.Set;
23   import java.util.Spliterator;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.ForkJoinPool;
24   import java.util.concurrent.atomic.AtomicReference;
25   import java.util.concurrent.locks.LockSupport;
26   import java.util.concurrent.locks.ReentrantLock;
27   import java.util.function.BiConsumer;
28   import java.util.function.BiFunction;
32 import java.util.function.BinaryOperator;
29   import java.util.function.Consumer;
30   import java.util.function.DoubleBinaryOperator;
31   import java.util.function.Function;
32   import java.util.function.IntBinaryOperator;
33   import java.util.function.LongBinaryOperator;
34 + import java.util.function.Predicate;
35   import java.util.function.ToDoubleBiFunction;
36   import java.util.function.ToDoubleFunction;
37   import java.util.function.ToIntBiFunction;
# Line 42 | Line 39 | import java.util.function.ToIntFunction;
39   import java.util.function.ToLongBiFunction;
40   import java.util.function.ToLongFunction;
41   import java.util.stream.Stream;
42 + import jdk.internal.misc.Unsafe;
43  
44   /**
45   * A hash table supporting full concurrency of retrievals and
# Line 64 | Line 62 | import java.util.stream.Stream;
62   * that key reporting the updated value.)  For aggregate operations
63   * such as {@code putAll} and {@code clear}, concurrent retrievals may
64   * reflect insertion or removal of only some entries.  Similarly,
65 < * Iterators and Enumerations return elements reflecting the state of
66 < * the hash table at some point at or since the creation of the
65 > * Iterators, Spliterators and Enumerations return elements reflecting the
66 > * state of the hash table at some point at or since the creation of the
67   * iterator/enumeration.  They do <em>not</em> throw {@link
68 < * ConcurrentModificationException}.  However, iterators are designed
69 < * to be used by only one thread at a time.  Bear in mind that the
70 < * results of aggregate status methods including {@code size}, {@code
71 < * isEmpty}, and {@code containsValue} are typically useful only when
72 < * a map is not undergoing concurrent updates in other threads.
68 > * java.util.ConcurrentModificationException ConcurrentModificationException}.
69 > * However, iterators are designed to be used by only one thread at a time.
70 > * Bear in mind that the results of aggregate status methods including
71 > * {@code size}, {@code isEmpty}, and {@code containsValue} are typically
72 > * useful only when a map is not undergoing concurrent updates in other threads.
73   * Otherwise the results of these methods reflect transient states
74   * that may be adequate for monitoring or estimation purposes, but not
75   * for program control.
# Line 104 | Line 102 | import java.util.stream.Stream;
102   * mapped values are (perhaps transiently) not used or all take the
103   * same mapping value.
104   *
105 < * <p>A ConcurrentHashMap can be used as scalable frequency map (a
105 > * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
106   * form of histogram or multiset) by using {@link
107   * java.util.concurrent.atomic.LongAdder} values and initializing via
108   * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
109   * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
110 < * {@code freqs.computeIfAbsent(k -> new LongAdder()).increment();}
110 > * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
111   *
112   * <p>This class and its views and iterators implement all of the
113   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
# Line 124 | Line 122 | import java.util.stream.Stream;
122   * being concurrently updated by other threads; for example, when
123   * computing a snapshot summary of the values in a shared registry.
124   * There are three kinds of operation, each with four forms, accepting
125 < * functions with Keys, Values, Entries, and (Key, Value) arguments
126 < * and/or return values. Because the elements of a ConcurrentHashMap
127 < * are not ordered in any particular way, and may be processed in
128 < * different orders in different parallel executions, the correctness
129 < * of supplied functions should not depend on any ordering, or on any
130 < * other objects or values that may transiently change while
131 < * computation is in progress; and except for forEach actions, should
132 < * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
133 < * objects do not support method {@code setValue}.
125 > * functions with keys, values, entries, and (key, value) pairs as
126 > * arguments and/or return values. Because the elements of a
127 > * ConcurrentHashMap are not ordered in any particular way, and may be
128 > * processed in different orders in different parallel executions, the
129 > * correctness of supplied functions should not depend on any
130 > * ordering, or on any other objects or values that may transiently
131 > * change while computation is in progress; and except for forEach
132 > * actions, should ideally be side-effect-free. Bulk operations on
133 > * {@link Map.Entry} objects do not support method {@code setValue}.
134   *
135   * <ul>
136 < * <li> forEach: Perform a given action on each element.
136 > * <li>forEach: Performs a given action on each element.
137   * A variant form applies a given transformation on each element
138 < * before performing the action.</li>
138 > * before performing the action.
139   *
140 < * <li> search: Return the first available non-null result of
140 > * <li>search: Returns the first available non-null result of
141   * applying a given function on each element; skipping further
142 < * search when a result is found.</li>
142 > * search when a result is found.
143   *
144 < * <li> reduce: Accumulate each element.  The supplied reduction
144 > * <li>reduce: Accumulates each element.  The supplied reduction
145   * function cannot rely on ordering (more formally, it should be
146   * both associative and commutative).  There are five variants:
147   *
148   * <ul>
149   *
150 < * <li> Plain reductions. (There is not a form of this method for
150 > * <li>Plain reductions. (There is not a form of this method for
151   * (key, value) function arguments since there is no corresponding
152 < * return type.)</li>
152 > * return type.)
153   *
154 < * <li> Mapped reductions that accumulate the results of a given
155 < * function applied to each element.</li>
154 > * <li>Mapped reductions that accumulate the results of a given
155 > * function applied to each element.
156   *
157 < * <li> Reductions to scalar doubles, longs, and ints, using a
158 < * given basis value.</li>
157 > * <li>Reductions to scalar doubles, longs, and ints, using a
158 > * given basis value.
159   *
160   * </ul>
163 * </li>
161   * </ul>
162   *
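
A rough sketch of the forEach/search/reduce forms listed above, using the variants that take a parallelismThreshold; the map contents, threshold, and local names are illustrative only:

    import java.util.concurrent.ConcurrentHashMap;

    class BulkOpsSketch {
        static void demo(ConcurrentHashMap<String,Integer> scores) {
            long threshold = 1_000L;  // illustrative parallelismThreshold

            // forEach: perform an action on each (key, value) pair.
            scores.forEach(threshold, (k, v) -> System.out.println(k + " -> " + v));

            // search: return the first key whose value exceeds a bound, or null if none found.
            String big = scores.search(threshold, (k, v) -> v > 100 ? k : null);

            // reduce: sum all values to a long, starting from the basis 0L.
            long total = scores.reduceValuesToLong(threshold, v -> (long) v, 0L, Long::sum);

            System.out.println(big + " " + total);
        }
    }
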
163   * <p>These bulk operations accept a {@code parallelismThreshold}
# Line 227 | Line 224 | import java.util.stream.Stream;
224   * <p>All arguments to all task methods must be non-null.
225   *
226   * <p>This class is a member of the
227 < * <a href="{@docRoot}/../technotes/guides/collections/index.html">
227 > * <a href="{@docRoot}/java/util/package-summary.html#CollectionsFramework">
228   * Java Collections Framework</a>.
229   *
230   * @since 1.5
# Line 235 | Line 232 | import java.util.stream.Stream;
232   * @param <K> the type of keys maintained by this map
233   * @param <V> the type of mapped values
234   */
235 < public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
235 > public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
236 >    implements ConcurrentMap<K,V>, Serializable {
237      private static final long serialVersionUID = 7249069246763182397L;
238  
239      /*
# Line 262 | Line 260 | public class ConcurrentHashMap<K,V> impl
260       * because they have negative hash fields and null key and value
261       * fields. (These special nodes are either uncommon or transient,
262       * so the impact of carrying around some unused fields is
263 <     * insignficant.)
263 >     * insignificant.)
264       *
265       * The table is lazily initialized to a power-of-two size upon the
266       * first insertion.  Each bin in the table normally contains a
# Line 270 | Line 268 | public class ConcurrentHashMap<K,V> impl
268       * Table accesses require volatile/atomic reads, writes, and
269       * CASes.  Because there is no other way to arrange this without
270       * adding further indirections, we use intrinsics
271 <     * (sun.misc.Unsafe) operations.
271 >     * (jdk.internal.misc.Unsafe) operations.
272       *
273       * We use the top (sign) bit of Node hash fields for control
274       * purposes -- it is available anyway because of addressing
# Line 343 | Line 341 | public class ConcurrentHashMap<K,V> impl
341       * The table is resized when occupancy exceeds a percentage
342       * threshold (nominally, 0.75, but see below).  Any thread
343       * noticing an overfull bin may assist in resizing after the
344 <     * initiating thread allocates and sets up the replacement
345 <     * array. However, rather than stalling, these other threads may
346 <     * proceed with insertions etc.  The use of TreeBins shields us
347 <     * from the worst case effects of overfilling while resizes are in
344 >     * initiating thread allocates and sets up the replacement array.
345 >     * However, rather than stalling, these other threads may proceed
346 >     * with insertions etc.  The use of TreeBins shields us from the
347 >     * worst case effects of overfilling while resizes are in
348       * progress.  Resizing proceeds by transferring bins, one by one,
349 <     * from the table to the next table. To enable concurrency, the
350 <     * next table must be (incrementally) prefilled with place-holders
351 <     * serving as reverse forwarders to the old table.  Because we are
349 >     * from the table to the next table. However, threads claim small
350 >     * blocks of indices to transfer (via field transferIndex) before
351 >     * doing so, reducing contention.  A generation stamp in field
352 >     * sizeCtl ensures that resizings do not overlap. Because we are
353       * using power-of-two expansion, the elements from each bin must
354       * either stay at same index, or move with a power of two
355       * offset. We eliminate unnecessary node creation by catching
# Line 371 | Line 370 | public class ConcurrentHashMap<K,V> impl
370       * locks, average aggregate waits become shorter as resizing
371       * progresses.  The transfer operation must also ensure that all
372       * accessible bins in both the old and new table are usable by any
373 <     * traversal.  This is arranged by proceeding from the last bin
374 <     * (table.length - 1) up towards the first.  Upon seeing a
375 <     * forwarding node, traversals (see class Traverser) arrange to
376 <     * move to the new table without revisiting nodes.  However, to
377 <     * ensure that no intervening nodes are skipped, bin splitting can
378 <     * only begin after the associated reverse-forwarders are in
379 <     * place.
373 >     * traversal.  This is arranged in part by proceeding from the
374 >     * last bin (table.length - 1) up towards the first.  Upon seeing
375 >     * a forwarding node, traversals (see class Traverser) arrange to
376 >     * move to the new table without revisiting nodes.  To ensure that
377 >     * no intervening nodes are skipped even when moved out of order,
378 >     * a stack (see class TableStack) is created on first encounter of
379 >     * a forwarding node during a traversal, to maintain its place if
380 >     * later processing the current table. The need for these
381 >     * save/restore mechanics is relatively rare, but when one
382 >     * forwarding node is encountered, typically many more will be.
383 >     * So Traversers use a simple caching scheme to avoid creating so
384 >     * many new TableStack nodes. (Thanks to Peter Levart for
385 >     * suggesting use of a stack here.)
386       *
387       * The traversal scheme also applies to partial traversals of
388       * ranges of bins (via an alternate Traverser constructor)
# Line 409 | Line 414 | public class ConcurrentHashMap<K,V> impl
414       * related operations (which is the main reason we cannot use
415       * existing collections such as TreeMaps). TreeBins contain
416       * Comparable elements, but may contain others, as well as
417 <     * elements that are Comparable but not necessarily Comparable
418 <     * for the same T, so we cannot invoke compareTo among them. To
419 <     * handle this, the tree is ordered primarily by hash value, then
420 <     * by Comparable.compareTo order if applicable.  On lookup at a
421 <     * node, if elements are not comparable or compare as 0 then both
422 <     * left and right children may need to be searched in the case of
423 <     * tied hash values. (This corresponds to the full list search
424 <     * that would be necessary if all elements were non-Comparable and
425 <     * had tied hashes.)  The red-black balancing code is updated from
426 <     * pre-jdk-collections
417 >     * elements that are Comparable but not necessarily Comparable for
418 >     * the same T, so we cannot invoke compareTo among them. To handle
419 >     * this, the tree is ordered primarily by hash value, then by
420 >     * Comparable.compareTo order if applicable.  On lookup at a node,
421 >     * if elements are not comparable or compare as 0 then both left
422 >     * and right children may need to be searched in the case of tied
423 >     * hash values. (This corresponds to the full list search that
424 >     * would be necessary if all elements were non-Comparable and had
425 >     * tied hashes.) On insertion, to keep a total ordering (or as
426 >     * close as is required here) across rebalancings, we compare
427 >     * classes and identityHashCodes as tie-breakers. The red-black
428 >     * balancing code is updated from pre-jdk-collections
429       * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
430       * based in turn on Cormen, Leiserson, and Rivest "Introduction to
431       * Algorithms" (CLR).
432       *
433       * TreeBins also require an additional locking mechanism.  While
434       * list traversal is always possible by readers even during
435 <     * updates, tree traversal is not, mainly beause of tree-rotations
435 >     * updates, tree traversal is not, mainly because of tree-rotations
436       * that may change the root node and/or its linkages.  TreeBins
437       * include a simple read-write lock mechanism parasitic on the
438       * main bin-synchronization strategy: Structural adjustments
# Line 441 | Line 448 | public class ConcurrentHashMap<K,V> impl
448       *
449       * Maintaining API and serialization compatibility with previous
450       * versions of this class introduces several oddities. Mainly: We
451 <     * leave untouched but unused constructor arguments refering to
451 >     * leave untouched but unused constructor arguments referring to
452       * concurrencyLevel. We accept a loadFactor constructor argument,
453       * but apply it only to initial table capacity (which is the only
454       * time that we can guarantee to honor it.) We also declare an
455       * unused "Segment" class that is instantiated in minimal form
456       * only when serializing.
457       *
458 +     * Also, solely for compatibility with previous versions of this
459 +     * class, it extends AbstractMap, even though all of its methods
460 +     * are overridden, so it is just useless baggage.
461 +     *
462       * This file is organized to make things a little easier to follow
463       * while reading than they might otherwise: First the main static
464       * declarations and utilities, then fields, then main public
# Line 528 | Line 539 | public class ConcurrentHashMap<K,V> impl
539       */
540      private static final int MIN_TRANSFER_STRIDE = 16;
541  
542 +    /**
543 +     * The number of bits used for generation stamp in sizeCtl.
544 +     * Must be at least 6 for 32bit arrays.
545 +     */
546 +    private static final int RESIZE_STAMP_BITS = 16;
547 +
548 +    /**
549 +     * The maximum number of threads that can help resize.
550 +     * Must fit in 32 - RESIZE_STAMP_BITS bits.
551 +     */
552 +    private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
553 +
554 +    /**
555 +     * The bit shift for recording size stamp in sizeCtl.
556 +     */
557 +    private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
558 +
559      /*
560       * Encodings for Node hash fields. See above for explanation.
561       */
562 <    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
563 <    static final int TREEBIN   = 0x80000000; // hash for heads of treea
564 <    static final int RESERVED  = 0x80000001; // hash for transient reservations
562 >    static final int MOVED     = -1; // hash for forwarding nodes
563 >    static final int TREEBIN   = -2; // hash for roots of trees
564 >    static final int RESERVED  = -3; // hash for transient reservations
565      static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
566  
567      /** Number of CPUS, to place bounds on some sizings */
568      static final int NCPU = Runtime.getRuntime().availableProcessors();
569  
570 <    /** For serialization compatibility. */
570 >    /**
571 >     * Serialized pseudo-fields, provided only for jdk7 compatibility.
572 >     * @serialField segments Segment[]
573 >     *   The segments, each of which is a specialized hash table.
574 >     * @serialField segmentMask int
575 >     *   Mask value for indexing into segments. The upper bits of a
576 >     *   key's hash code are used to choose the segment.
577 >     * @serialField segmentShift int
578 >     *   Shift value for indexing within segments.
579 >     */
580      private static final ObjectStreamField[] serialPersistentFields = {
581          new ObjectStreamField("segments", Segment[].class),
582          new ObjectStreamField("segmentMask", Integer.TYPE),
583 <        new ObjectStreamField("segmentShift", Integer.TYPE)
583 >        new ObjectStreamField("segmentShift", Integer.TYPE),
584      };
585  
586      /* ---------------- Nodes -------------- */
# Line 552 | Line 589 | public class ConcurrentHashMap<K,V> impl
589       * Key-value entry.  This class is never exported out as a
590       * user-mutable Map.Entry (i.e., one supporting setValue; see
591       * MapEntry below), but can be used for read-only traversals used
592 <     * in bulk tasks.  Subclasses of Node with a negativehash field
592 >     * in bulk tasks.  Subclasses of Node with a negative hash field
593       * are special, and contain null keys and values (but are never
594       * exported).  Otherwise, keys and vals are never null.
595       */
# Line 560 | Line 597 | public class ConcurrentHashMap<K,V> impl
597          final int hash;
598          final K key;
599          volatile V val;
600 <        Node<K,V> next;
600 >        volatile Node<K,V> next;
601  
602 <        Node(int hash, K key, V val, Node<K,V> next) {
602 >        Node(int hash, K key, V val) {
603              this.hash = hash;
604              this.key = key;
605              this.val = val;
606 +        }
607 +
608 +        Node(int hash, K key, V val, Node<K,V> next) {
609 +            this(hash, key, val);
610              this.next = next;
611          }
612  
613 <        public final K getKey()       { return key; }
614 <        public final V getValue()     { return val; }
615 <        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
616 <        public final String toString(){ return key + "=" + val; }
613 >        public final K getKey()     { return key; }
614 >        public final V getValue()   { return val; }
615 >        public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
616 >        public final String toString() {
617 >            return Helpers.mapEntryToString(key, val);
618 >        }
619          public final V setValue(V value) {
620              throw new UnsupportedOperationException();
621          }
# Line 645 | Line 688 | public class ConcurrentHashMap<K,V> impl
688       */
689      static Class<?> comparableClassFor(Object x) {
690          if (x instanceof Comparable) {
691 <            Class<?> c; Type[] ts, as; Type t; ParameterizedType p;
691 >            Class<?> c; Type[] ts, as; ParameterizedType p;
692              if ((c = x.getClass()) == String.class) // bypass checks
693                  return c;
694              if ((ts = c.getGenericInterfaces()) != null) {
695 <                for (int i = 0; i < ts.length; ++i) {
696 <                    if (((t = ts[i]) instanceof ParameterizedType) &&
695 >                for (Type t : ts) {
696 >                    if ((t instanceof ParameterizedType) &&
697                          ((p = (ParameterizedType)t).getRawType() ==
698                           Comparable.class) &&
699                          (as = p.getActualTypeArguments()) != null &&
# Line 675 | Line 718 | public class ConcurrentHashMap<K,V> impl
718      /* ---------------- Table element access -------------- */
719  
720      /*
721 <     * Volatile access methods are used for table elements as well as
721 >     * Atomic access methods are used for table elements as well as
722       * elements of in-progress next table while resizing.  All uses of
723       * the tab arguments must be null checked by callers.  All callers
724       * also paranoically precheck that tab's length is not zero (or an
# Line 685 | Line 728 | public class ConcurrentHashMap<K,V> impl
728       * errors by users, these checks must operate on local variables,
729       * which accounts for some odd-looking inline assignments below.
730       * Note that calls to setTabAt always occur within locked regions,
731 <     * and so do not need full volatile semantics, but still require
689 <     * ordering to maintain concurrent readability.
731 >     * and so require only release ordering.
732       */
733  
734      @SuppressWarnings("unchecked")
735      static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
736 <        return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
736 >        return (Node<K,V>)U.getObjectAcquire(tab, ((long)i << ASHIFT) + ABASE);
737      }
738  
739      static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
740                                          Node<K,V> c, Node<K,V> v) {
741 <        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
741 >        return U.compareAndSetObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
742      }
743  
744      static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
745 <        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
745 >        U.putObjectRelease(tab, ((long)i << ASHIFT) + ABASE, v);
746      }
747  
748      /* ---------------- Fields -------------- */
# Line 739 | Line 781 | public class ConcurrentHashMap<K,V> impl
781      private transient volatile int transferIndex;
782  
783      /**
742     * The least available table index to split while resizing.
743     */
744    private transient volatile int transferOrigin;
745
746    /**
784       * Spinlock (locked via CAS) used when resizing and/or creating CounterCells.
785       */
786      private transient volatile int cellsBusy;
# Line 956 | Line 993 | public class ConcurrentHashMap<K,V> impl
993          int hash = spread(key.hashCode());
994          int binCount = 0;
995          for (Node<K,V>[] tab = table;;) {
996 <            Node<K,V> f; int n, i, fh;
996 >            Node<K,V> f; int n, i, fh; K fk; V fv;
997              if (tab == null || (n = tab.length) == 0)
998                  tab = initTable();
999              else if ((f = tabAt(tab, i = (n - 1) & hash)) == null) {
1000 <                if (casTabAt(tab, i, null,
964 <                             new Node<K,V>(hash, key, value, null)))
1000 >                if (casTabAt(tab, i, null, new Node<K,V>(hash, key, value)))
1001                      break;                   // no lock when adding to empty bin
1002              }
1003              else if ((fh = f.hash) == MOVED)
1004                  tab = helpTransfer(tab, f);
1005 +            else if (onlyIfAbsent // check first node without acquiring lock
1006 +                     && fh == hash
1007 +                     && ((fk = f.key) == key || (fk != null && key.equals(fk)))
1008 +                     && (fv = f.val) != null)
1009 +                return fv;
1010              else {
1011                  V oldVal = null;
1012                  synchronized (f) {
# Line 984 | Line 1025 | public class ConcurrentHashMap<K,V> impl
1025                                  }
1026                                  Node<K,V> pred = e;
1027                                  if ((e = e.next) == null) {
1028 <                                    pred.next = new Node<K,V>(hash, key,
988 <                                                              value, null);
1028 >                                    pred.next = new Node<K,V>(hash, key, value);
1029                                      break;
1030                                  }
1031                              }
# Line 1000 | Line 1040 | public class ConcurrentHashMap<K,V> impl
1040                                      p.val = value;
1041                              }
1042                          }
1043 +                        else if (f instanceof ReservationNode)
1044 +                            throw new IllegalStateException("Recursive update");
1045                      }
1046                  }
1047                  if (binCount != 0) {
# Line 1102 | Line 1144 | public class ConcurrentHashMap<K,V> impl
1144                                  }
1145                              }
1146                          }
1147 +                        else if (f instanceof ReservationNode)
1148 +                            throw new IllegalStateException("Recursive update");
1149                      }
1150                  }
1151                  if (validated) {
# Line 1162 | Line 1206 | public class ConcurrentHashMap<K,V> impl
1206       * operations.  It does not support the {@code add} or
1207       * {@code addAll} operations.
1208       *
1209 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1210 <     * that will never throw {@link ConcurrentModificationException},
1211 <     * and guarantees to traverse elements as they existed upon
1212 <     * construction of the iterator, and may (but is not guaranteed to)
1213 <     * reflect any modifications subsequent to construction.
1209 >     * <p>The view's iterators and spliterators are
1210 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1211 >     *
1212 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1213 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1214       *
1215       * @return the set view
1216       */
1217      public KeySetView<K,V> keySet() {
1218          KeySetView<K,V> ks;
1219 <        return (ks = keySet) != null ? ks : (keySet = new KeySetView<K,V>(this, null));
1219 >        if ((ks = keySet) != null) return ks;
1220 >        return keySet = new KeySetView<K,V>(this, null);
1221      }
1222  
1223      /**
# Line 1185 | Line 1230 | public class ConcurrentHashMap<K,V> impl
1230       * {@code retainAll}, and {@code clear} operations.  It does not
1231       * support the {@code add} or {@code addAll} operations.
1232       *
1233 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1234 <     * that will never throw {@link ConcurrentModificationException},
1235 <     * and guarantees to traverse elements as they existed upon
1236 <     * construction of the iterator, and may (but is not guaranteed to)
1237 <     * reflect any modifications subsequent to construction.
1233 >     * <p>The view's iterators and spliterators are
1234 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1235 >     *
1236 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}
1237 >     * and {@link Spliterator#NONNULL}.
1238       *
1239       * @return the collection view
1240       */
1241      public Collection<V> values() {
1242          ValuesView<K,V> vs;
1243 <        return (vs = values) != null ? vs : (values = new ValuesView<K,V>(this));
1243 >        if ((vs = values) != null) return vs;
1244 >        return values = new ValuesView<K,V>(this);
1245      }
1246  
1247      /**
# Line 1207 | Line 1253 | public class ConcurrentHashMap<K,V> impl
1253       * {@code removeAll}, {@code retainAll}, and {@code clear}
1254       * operations.
1255       *
1256 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1257 <     * that will never throw {@link ConcurrentModificationException},
1258 <     * and guarantees to traverse elements as they existed upon
1259 <     * construction of the iterator, and may (but is not guaranteed to)
1260 <     * reflect any modifications subsequent to construction.
1256 >     * <p>The view's iterators and spliterators are
1257 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1258 >     *
1259 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1260 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1261       *
1262       * @return the set view
1263       */
1264      public Set<Map.Entry<K,V>> entrySet() {
1265          EntrySetView<K,V> es;
1266 <        return (es = entrySet) != null ? es : (entrySet = new EntrySetView<K,V>(this));
1266 >        if ((es = entrySet) != null) return es;
1267 >        return entrySet = new EntrySetView<K,V>(this);
1268      }
1269  
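
As a small sketch of the weakly consistent view iteration documented above (all names and contents are local to this example):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class ViewIterationSketch {
        static void demo() {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            // Iterating a view never throws ConcurrentModificationException,
            // even if the map is modified during the traversal.
            for (Map.Entry<String,Integer> e : map.entrySet()) {
                map.putIfAbsent("c", 3);  // update while iterating; iterator stays valid
                System.out.println(e.getKey() + "=" + e.getValue());
            }
        }
    }
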
1270      /**
# Line 1309 | Line 1356 | public class ConcurrentHashMap<K,V> impl
1356  
1357      /**
1358       * Stripped-down version of helper class used in previous version,
1359 <     * declared for the sake of serialization compatibility
1359 >     * declared for the sake of serialization compatibility.
1360       */
1361      static class Segment<K,V> extends ReentrantLock implements Serializable {
1362          private static final long serialVersionUID = 2249069246763182397L;
# Line 1318 | Line 1365 | public class ConcurrentHashMap<K,V> impl
1365      }
1366  
1367      /**
1368 <     * Saves the state of the {@code ConcurrentHashMap} instance to a
1369 <     * stream (i.e., serializes it).
1368 >     * Saves this map to a stream (that is, serializes it).
1369 >     *
1370       * @param s the stream
1371 +     * @throws java.io.IOException if an I/O error occurs
1372       * @serialData
1373 <     * the key (Object) and value (Object)
1374 <     * for each key-value mapping, followed by a null pair.
1373 >     * the serialized fields, followed by the key (Object) and value
1374 >     * (Object) for each key-value mapping, followed by a null pair.
1375       * The key-value mappings are emitted in no particular order.
1376       */
1377      private void writeObject(java.io.ObjectOutputStream s)
# Line 1338 | Line 1386 | public class ConcurrentHashMap<K,V> impl
1386          }
1387          int segmentShift = 32 - sshift;
1388          int segmentMask = ssize - 1;
1389 <        @SuppressWarnings("unchecked") Segment<K,V>[] segments = (Segment<K,V>[])
1389 >        @SuppressWarnings("unchecked")
1390 >        Segment<K,V>[] segments = (Segment<K,V>[])
1391              new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
1392          for (int i = 0; i < segments.length; ++i)
1393              segments[i] = new Segment<K,V>(LOAD_FACTOR);
1394 <        s.putFields().put("segments", segments);
1395 <        s.putFields().put("segmentShift", segmentShift);
1396 <        s.putFields().put("segmentMask", segmentMask);
1394 >        java.io.ObjectOutputStream.PutField streamFields = s.putFields();
1395 >        streamFields.put("segments", segments);
1396 >        streamFields.put("segmentShift", segmentShift);
1397 >        streamFields.put("segmentMask", segmentMask);
1398          s.writeFields();
1399  
1400          Node<K,V>[] t;
# Line 1357 | Line 1407 | public class ConcurrentHashMap<K,V> impl
1407          }
1408          s.writeObject(null);
1409          s.writeObject(null);
1360        segments = null; // throw away
1410      }
1411  
1412      /**
1413 <     * Reconstitutes the instance from a stream (that is, deserializes it).
1413 >     * Reconstitutes this map from a stream (that is, deserializes it).
1414       * @param s the stream
1415 +     * @throws ClassNotFoundException if the class of a serialized object
1416 +     *         could not be found
1417 +     * @throws java.io.IOException if an I/O error occurs
1418       */
1419      private void readObject(java.io.ObjectInputStream s)
1420          throws java.io.IOException, ClassNotFoundException {
# Line 1378 | Line 1430 | public class ConcurrentHashMap<K,V> impl
1430          long size = 0L;
1431          Node<K,V> p = null;
1432          for (;;) {
1433 <            @SuppressWarnings("unchecked") K k = (K) s.readObject();
1434 <            @SuppressWarnings("unchecked") V v = (V) s.readObject();
1433 >            @SuppressWarnings("unchecked")
1434 >            K k = (K) s.readObject();
1435 >            @SuppressWarnings("unchecked")
1436 >            V v = (V) s.readObject();
1437              if (k != null && v != null) {
1438                  p = new Node<K,V>(spread(k.hashCode()), k, v, p);
1439                  ++size;
# Line 1397 | Line 1451 | public class ConcurrentHashMap<K,V> impl
1451                  int sz = (int)size;
1452                  n = tableSizeFor(sz + (sz >>> 1) + 1);
1453              }
1454 <            @SuppressWarnings({"rawtypes","unchecked"})
1455 <                Node<K,V>[] tab = (Node<K,V>[])new Node[n];
1454 >            @SuppressWarnings("unchecked")
1455 >            Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
1456              int mask = n - 1;
1457              long added = 0L;
1458              while (p != null) {
# Line 1556 | Line 1610 | public class ConcurrentHashMap<K,V> impl
1610      }
1611  
1612      /**
1613 +     * Helper method for EntrySetView.removeIf.
1614 +     */
1615 +    boolean removeEntryIf(Predicate<? super Entry<K,V>> function) {
1616 +        if (function == null) throw new NullPointerException();
1617 +        Node<K,V>[] t;
1618 +        boolean removed = false;
1619 +        if ((t = table) != null) {
1620 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1621 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1622 +                K k = p.key;
1623 +                V v = p.val;
1624 +                Map.Entry<K,V> e = new AbstractMap.SimpleImmutableEntry<>(k, v);
1625 +                if (function.test(e) && replaceNode(k, null, v) != null)
1626 +                    removed = true;
1627 +            }
1628 +        }
1629 +        return removed;
1630 +    }
1631 +
1632 +    /**
1633 +     * Helper method for ValuesView.removeIf.
1634 +     */
1635 +    boolean removeValueIf(Predicate<? super V> function) {
1636 +        if (function == null) throw new NullPointerException();
1637 +        Node<K,V>[] t;
1638 +        boolean removed = false;
1639 +        if ((t = table) != null) {
1640 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1641 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1642 +                K k = p.key;
1643 +                V v = p.val;
1644 +                if (function.test(v) && replaceNode(k, null, v) != null)
1645 +                    removed = true;
1646 +            }
1647 +        }
1648 +        return removed;
1649 +    }
1650 +
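
A minimal usage sketch for the helpers above, assuming (as the comments indicate) that the values and entry-set views route their removeIf calls through removeValueIf and removeEntryIf; the map and predicates are hypothetical:

    import java.util.concurrent.ConcurrentHashMap;

    class RemoveIfSketch {
        static void demo() {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("keep", 10);
            map.put("drop", -5);
            // removeIf on the values view is expected to delegate to removeValueIf.
            map.values().removeIf(v -> v < 0);
            // removeIf on the entry-set view is expected to delegate to removeEntryIf.
            map.entrySet().removeIf(e -> e.getKey().startsWith("tmp"));
        }
    }
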
1651 +    /**
1652       * If the specified key is not already associated with a value,
1653       * attempts to compute its value using the given mapping function
1654       * and enters it into this map unless {@code null}.  The entire
# Line 1584 | Line 1677 | public class ConcurrentHashMap<K,V> impl
1677          V val = null;
1678          int binCount = 0;
1679          for (Node<K,V>[] tab = table;;) {
1680 <            Node<K,V> f; int n, i, fh;
1680 >            Node<K,V> f; int n, i, fh; K fk; V fv;
1681              if (tab == null || (n = tab.length) == 0)
1682                  tab = initTable();
1683              else if ((f = tabAt(tab, i = (n - 1) & h)) == null) {
# Line 1595 | Line 1688 | public class ConcurrentHashMap<K,V> impl
1688                          Node<K,V> node = null;
1689                          try {
1690                              if ((val = mappingFunction.apply(key)) != null)
1691 <                                node = new Node<K,V>(h, key, val, null);
1691 >                                node = new Node<K,V>(h, key, val);
1692                          } finally {
1693                              setTabAt(tab, i, node);
1694                          }
# Line 1606 | Line 1699 | public class ConcurrentHashMap<K,V> impl
1699              }
1700              else if ((fh = f.hash) == MOVED)
1701                  tab = helpTransfer(tab, f);
1702 +            else if (fh == h    // check first node without acquiring lock
1703 +                     && ((fk = f.key) == key || (fk != null && key.equals(fk)))
1704 +                     && (fv = f.val) != null)
1705 +                return fv;
1706              else {
1707                  boolean added = false;
1708                  synchronized (f) {
# Line 1613 | Line 1710 | public class ConcurrentHashMap<K,V> impl
1710                          if (fh >= 0) {
1711                              binCount = 1;
1712                              for (Node<K,V> e = f;; ++binCount) {
1713 <                                K ek; V ev;
1713 >                                K ek;
1714                                  if (e.hash == h &&
1715                                      ((ek = e.key) == key ||
1716                                       (ek != null && key.equals(ek)))) {
# Line 1623 | Line 1720 | public class ConcurrentHashMap<K,V> impl
1720                                  Node<K,V> pred = e;
1721                                  if ((e = e.next) == null) {
1722                                      if ((val = mappingFunction.apply(key)) != null) {
1723 +                                        if (pred.next != null)
1724 +                                            throw new IllegalStateException("Recursive update");
1725                                          added = true;
1726 <                                        pred.next = new Node<K,V>(h, key, val, null);
1726 >                                        pred.next = new Node<K,V>(h, key, val);
1727                                      }
1728                                      break;
1729                                  }
# Line 1642 | Line 1741 | public class ConcurrentHashMap<K,V> impl
1741                                  t.putTreeVal(h, key, val);
1742                              }
1743                          }
1744 +                        else if (f instanceof ReservationNode)
1745 +                            throw new IllegalStateException("Recursive update");
1746                      }
1747                  }
1748                  if (binCount != 0) {
# Line 1737 | Line 1838 | public class ConcurrentHashMap<K,V> impl
1838                                  }
1839                              }
1840                          }
1841 +                        else if (f instanceof ReservationNode)
1842 +                            throw new IllegalStateException("Recursive update");
1843                      }
1844                  }
1845                  if (binCount != 0)
# Line 1789 | Line 1892 | public class ConcurrentHashMap<K,V> impl
1892                          try {
1893                              if ((val = remappingFunction.apply(key, null)) != null) {
1894                                  delta = 1;
1895 <                                node = new Node<K,V>(h, key, val, null);
1895 >                                node = new Node<K,V>(h, key, val);
1896                              }
1897                          } finally {
1898                              setTabAt(tab, i, node);
# Line 1828 | Line 1931 | public class ConcurrentHashMap<K,V> impl
1931                                  if ((e = e.next) == null) {
1932                                      val = remappingFunction.apply(key, null);
1933                                      if (val != null) {
1934 +                                        if (pred.next != null)
1935 +                                            throw new IllegalStateException("Recursive update");
1936                                          delta = 1;
1937 <                                        pred.next =
1833 <                                            new Node<K,V>(h, key, val, null);
1937 >                                        pred.next = new Node<K,V>(h, key, val);
1938                                      }
1939                                      break;
1940                                  }
# Line 1860 | Line 1964 | public class ConcurrentHashMap<K,V> impl
1964                                      setTabAt(tab, i, untreeify(t.first));
1965                              }
1966                          }
1967 +                        else if (f instanceof ReservationNode)
1968 +                            throw new IllegalStateException("Recursive update");
1969                      }
1970                  }
1971                  if (binCount != 0) {
# Line 1906 | Line 2012 | public class ConcurrentHashMap<K,V> impl
2012              if (tab == null || (n = tab.length) == 0)
2013                  tab = initTable();
2014              else if ((f = tabAt(tab, i = (n - 1) & h)) == null) {
2015 <                if (casTabAt(tab, i, null, new Node<K,V>(h, key, value, null))) {
2015 >                if (casTabAt(tab, i, null, new Node<K,V>(h, key, value))) {
2016                      delta = 1;
2017                      val = value;
2018                      break;
# Line 1941 | Line 2047 | public class ConcurrentHashMap<K,V> impl
2047                                  if ((e = e.next) == null) {
2048                                      delta = 1;
2049                                      val = value;
2050 <                                    pred.next =
1945 <                                        new Node<K,V>(h, key, val, null);
2050 >                                    pred.next = new Node<K,V>(h, key, val);
2051                                      break;
2052                                  }
2053                              }
# Line 1969 | Line 2074 | public class ConcurrentHashMap<K,V> impl
2074                                      setTabAt(tab, i, untreeify(t.first));
2075                              }
2076                          }
2077 +                        else if (f instanceof ReservationNode)
2078 +                            throw new IllegalStateException("Recursive update");
2079                      }
2080                  }
2081                  if (binCount != 0) {
# Line 1986 | Line 2093 | public class ConcurrentHashMap<K,V> impl
2093      // Hashtable legacy methods
2094  
2095      /**
2096 <     * Legacy method testing if some key maps into the specified value
2097 <     * in this table.  This method is identical in functionality to
2096 >     * Tests if some key maps into the specified value in this table.
2097 >     *
2098 >     * <p>Note that this method is identical in functionality to
2099       * {@link #containsValue(Object)}, and exists solely to ensure
2100       * full compatibility with class {@link java.util.Hashtable},
2101       * which supported this method prior to introduction of the
2102 <     * Java Collections framework.
2102 >     * Java Collections Framework.
2103       *
2104       * @param  value a value to search for
2105       * @return {@code true} if and only if some key maps to the
# Line 2000 | Line 2108 | public class ConcurrentHashMap<K,V> impl
2108       *         {@code false} otherwise
2109       * @throws NullPointerException if the specified value is null
2110       */
2111 <    @Deprecated public boolean contains(Object value) {
2111 >    public boolean contains(Object value) {
2112          return containsValue(value);
2113      }
2114  
# Line 2049 | Line 2157 | public class ConcurrentHashMap<K,V> impl
2157       * Creates a new {@link Set} backed by a ConcurrentHashMap
2158       * from the given type to {@code Boolean.TRUE}.
2159       *
2160 +     * @param <K> the element type of the returned set
2161       * @return the new set
2162       * @since 1.8
2163       */
# Line 2063 | Line 2172 | public class ConcurrentHashMap<K,V> impl
2172       *
2173       * @param initialCapacity The implementation performs internal
2174       * sizing to accommodate this many elements.
2175 +     * @param <K> the element type of the returned set
2176 +     * @return the new set
2177       * @throws IllegalArgumentException if the initial capacity of
2178       * elements is negative
2068     * @return the new set
2179       * @since 1.8
2180       */
2181      public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
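
For reference, a small sketch of the newKeySet factories documented above; the set contents are illustrative:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    class KeySetSketch {
        static void demo() {
            // A concurrent Set<String> backed by a ConcurrentHashMap<String,Boolean>.
            Set<String> seen = ConcurrentHashMap.newKeySet();
            boolean fresh = seen.add("first");          // true on first insertion
            // Pre-sized variant for when the expected element count is known.
            Set<String> large = ConcurrentHashMap.newKeySet(1 << 16);
            large.addAll(seen);
            System.out.println(fresh + " " + large.size());
        }
    }
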
# Line 2098 | Line 2208 | public class ConcurrentHashMap<K,V> impl
2208      static final class ForwardingNode<K,V> extends Node<K,V> {
2209          final Node<K,V>[] nextTable;
2210          ForwardingNode(Node<K,V>[] tab) {
2211 <            super(MOVED, null, null, null);
2211 >            super(MOVED, null, null);
2212              this.nextTable = tab;
2213          }
2214  
2215          Node<K,V> find(int h, Object k) {
2216 <            Node<K,V> e; int n;
2217 <            Node<K,V>[] tab = nextTable;
2218 <            if (k != null && tab != null && (n = tab.length) > 0 &&
2219 <                (e = tabAt(tab, (n - 1) & h)) != null) {
2220 <                do {
2216 >            // loop to avoid arbitrarily deep recursion on forwarding nodes
2217 >            outer: for (Node<K,V>[] tab = nextTable;;) {
2218 >                Node<K,V> e; int n;
2219 >                if (k == null || tab == null || (n = tab.length) == 0 ||
2220 >                    (e = tabAt(tab, (n - 1) & h)) == null)
2221 >                    return null;
2222 >                for (;;) {
2223                      int eh; K ek;
2224                      if ((eh = e.hash) == h &&
2225                          ((ek = e.key) == k || (ek != null && k.equals(ek))))
2226                          return e;
2227 <                    if (eh < 0)
2228 <                        return e.find(h, k);
2229 <                } while ((e = e.next) != null);
2227 >                    if (eh < 0) {
2228 >                        if (e instanceof ForwardingNode) {
2229 >                            tab = ((ForwardingNode<K,V>)e).nextTable;
2230 >                            continue outer;
2231 >                        }
2232 >                        else
2233 >                            return e.find(h, k);
2234 >                    }
2235 >                    if ((e = e.next) == null)
2236 >                        return null;
2237 >                }
2238              }
2119            return null;
2239          }
2240      }
2241  
2242      /**
2243 <     * A place-holder node used in computeIfAbsent and compute
2243 >     * A place-holder node used in computeIfAbsent and compute.
2244       */
2245      static final class ReservationNode<K,V> extends Node<K,V> {
2246          ReservationNode() {
2247 <            super(RESERVED, null, null, null);
2247 >            super(RESERVED, null, null);
2248          }
2249  
2250          Node<K,V> find(int h, Object k) {
# Line 2136 | Line 2255 | public class ConcurrentHashMap<K,V> impl
2255      /* ---------------- Table Initialization and Resizing -------------- */
2256  
2257      /**
2258 +     * Returns the stamp bits for resizing a table of size n.
2259 +     * Must be negative when shifted left by RESIZE_STAMP_SHIFT.
2260 +     */
2261 +    static final int resizeStamp(int n) {
2262 +        return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
2263 +    }
2264 +
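
A worked example of the stamp arithmetic above, assuming RESIZE_STAMP_BITS = 16 (so RESIZE_STAMP_SHIFT = 16); the wrapping class is hypothetical and simply mirrors the computation in resizeStamp:

    class ResizeStampSketch {
        static void demo() {
            int n = 16;                                               // current table length
            int stamp = Integer.numberOfLeadingZeros(n) | (1 << 15); // 27 | 0x8000 = 0x801B
            int sc = (stamp << 16) + 2;                               // sizeCtl set by the first resizer
            // sc is negative (top bit set), its high 16 bits hold the stamp,
            // and each additional helper thread bumps the low bits by one.
            System.out.println(Integer.toHexString(stamp) + " " + sc);
        }
    }
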
2265 +    /**
2266       * Initializes table, using the size recorded in sizeCtl.
2267       */
2268      private final Node<K,V>[] initTable() {
# Line 2143 | Line 2270 | public class ConcurrentHashMap<K,V> impl
2270          while ((tab = table) == null || tab.length == 0) {
2271              if ((sc = sizeCtl) < 0)
2272                  Thread.yield(); // lost initialization race; just spin
2273 <            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2273 >            else if (U.compareAndSetInt(this, SIZECTL, sc, -1)) {
2274                  try {
2275                      if ((tab = table) == null || tab.length == 0) {
2276                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2277 <                        @SuppressWarnings({"rawtypes","unchecked"})
2278 <                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2277 >                        @SuppressWarnings("unchecked")
2278 >                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2279                          table = tab = nt;
2280                          sc = n - (n >>> 2);
2281                      }
# Line 2172 | Line 2299 | public class ConcurrentHashMap<K,V> impl
2299       * @param check if <0, don't check resize, if <= 1 only check if uncontended
2300       */
2301      private final void addCount(long x, int check) {
2302 <        CounterCell[] as; long b, s;
2303 <        if ((as = counterCells) != null ||
2304 <            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
2305 <            CounterCell a; long v; int m;
2302 >        CounterCell[] cs; long b, s;
2303 >        if ((cs = counterCells) != null ||
2304 >            !U.compareAndSetLong(this, BASECOUNT, b = baseCount, s = b + x)) {
2305 >            CounterCell c; long v; int m;
2306              boolean uncontended = true;
2307 <            if (as == null || (m = as.length - 1) < 0 ||
2308 <                (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
2307 >            if (cs == null || (m = cs.length - 1) < 0 ||
2308 >                (c = cs[ThreadLocalRandom.getProbe() & m]) == null ||
2309                  !(uncontended =
2310 <                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
2310 >                  U.compareAndSetLong(c, CELLVALUE, v = c.value, v + x))) {
2311                  fullAddCount(x, uncontended);
2312                  return;
2313              }
# Line 2189 | Line 2316 | public class ConcurrentHashMap<K,V> impl
2316              s = sumCount();
2317          }
2318          if (check >= 0) {
2319 <            Node<K,V>[] tab, nt; int sc;
2319 >            Node<K,V>[] tab, nt; int n, sc;
2320              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
2321 <                   tab.length < MAXIMUM_CAPACITY) {
2321 >                   (n = tab.length) < MAXIMUM_CAPACITY) {
2322 >                int rs = resizeStamp(n);
2323                  if (sc < 0) {
2324 <                    if (sc == -1 || transferIndex <= transferOrigin ||
2325 <                        (nt = nextTable) == null)
2324 >                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2325 >                        sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
2326 >                        transferIndex <= 0)
2327                          break;
2328 <                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2328 >                    if (U.compareAndSetInt(this, SIZECTL, sc, sc + 1))
2329                          transfer(tab, nt);
2330                  }
2331 <                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
2331 >                else if (U.compareAndSetInt(this, SIZECTL, sc,
2332 >                                             (rs << RESIZE_STAMP_SHIFT) + 2))
2333                      transfer(tab, null);
2334                  s = sumCount();
2335              }
# Line 2211 | Line 2341 | public class ConcurrentHashMap<K,V> impl
2341       */
2342      final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
2343          Node<K,V>[] nextTab; int sc;
2344 <        if ((f instanceof ForwardingNode) &&
2344 >        if (tab != null && (f instanceof ForwardingNode) &&
2345              (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
2346 <            if (nextTab == nextTable && tab == table &&
2347 <                transferIndex > transferOrigin && (sc = sizeCtl) < -1 &&
2348 <                U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2349 <                transfer(tab, nextTab);
2346 >            int rs = resizeStamp(tab.length);
2347 >            while (nextTab == nextTable && table == tab &&
2348 >                   (sc = sizeCtl) < 0) {
2349 >                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2350 >                    sc == rs + MAX_RESIZERS || transferIndex <= 0)
2351 >                    break;
2352 >                if (U.compareAndSetInt(this, SIZECTL, sc, sc + 1)) {
2353 >                    transfer(tab, nextTab);
2354 >                    break;
2355 >                }
2356 >            }
2357              return nextTab;
2358          }
2359          return table;
# Line 2235 | Line 2372 | public class ConcurrentHashMap<K,V> impl
2372              Node<K,V>[] tab = table; int n;
2373              if (tab == null || (n = tab.length) == 0) {
2374                  n = (sc > c) ? sc : c;
2375 <                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2375 >                if (U.compareAndSetInt(this, SIZECTL, sc, -1)) {
2376                      try {
2377                          if (table == tab) {
2378 <                            @SuppressWarnings({"rawtypes","unchecked"})
2379 <                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2378 >                            @SuppressWarnings("unchecked")
2379 >                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2380                              table = nt;
2381                              sc = n - (n >>> 2);
2382                          }
# Line 2250 | Line 2387 | public class ConcurrentHashMap<K,V> impl
2387              }
2388              else if (c <= sc || n >= MAXIMUM_CAPACITY)
2389                  break;
2390 <            else if (tab == table &&
2391 <                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
2392 <                transfer(tab, null);
2390 >            else if (tab == table) {
2391 >                int rs = resizeStamp(n);
2392 >                if (U.compareAndSetInt(this, SIZECTL, sc,
2393 >                                        (rs << RESIZE_STAMP_SHIFT) + 2))
2394 >                    transfer(tab, null);
2395 >            }
2396          }
2397      }
2398  
# Line 2266 | Line 2406 | public class ConcurrentHashMap<K,V> impl
2406              stride = MIN_TRANSFER_STRIDE; // subdivide range
2407          if (nextTab == null) {            // initiating
2408              try {
2409 <                @SuppressWarnings({"rawtypes","unchecked"})
2410 <                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
2409 >                @SuppressWarnings("unchecked")
2410 >                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
2411                  nextTab = nt;
2412              } catch (Throwable ex) {      // try to cope with OOME
2413                  sizeCtl = Integer.MAX_VALUE;
2414                  return;
2415              }
2416              nextTable = nextTab;
2277            transferOrigin = n;
2417              transferIndex = n;
2279            ForwardingNode<K,V> rev = new ForwardingNode<K,V>(tab);
2280            for (int k = n; k > 0;) {    // progressively reveal ready slots
2281                int nextk = (k > stride) ? k - stride : 0;
2282                for (int m = nextk; m < k; ++m)
2283                    nextTab[m] = rev;
2284                for (int m = n + nextk; m < n + k; ++m)
2285                    nextTab[m] = rev;
2286                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2287            }
2418          }
2419          int nextn = nextTab.length;
2420          ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
2421          boolean advance = true;
2422 +        boolean finishing = false; // to ensure sweep before committing nextTab
2423          for (int i = 0, bound = 0;;) {
2424 <            int nextIndex, nextBound, fh; Node<K,V> f;
2424 >            Node<K,V> f; int fh;
2425              while (advance) {
2426 <                if (--i >= bound)
2426 >                int nextIndex, nextBound;
2427 >                if (--i >= bound || finishing)
2428                      advance = false;
2429 <                else if ((nextIndex = transferIndex) <= transferOrigin) {
2429 >                else if ((nextIndex = transferIndex) <= 0) {
2430                      i = -1;
2431                      advance = false;
2432                  }
2433 <                else if (U.compareAndSwapInt
2433 >                else if (U.compareAndSetInt
2434                           (this, TRANSFERINDEX, nextIndex,
2435                            nextBound = (nextIndex > stride ?
2436                                         nextIndex - stride : 0))) {
# Line 2308 | Line 2440 | public class ConcurrentHashMap<K,V> impl
2440                  }
2441              }
2442              if (i < 0 || i >= n || i + n >= nextn) {
2443 <                for (int sc;;) {
2444 <                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2445 <                        if (sc == -1) {
2446 <                            nextTable = null;
2447 <                            table = nextTab;
2448 <                            sizeCtl = (n << 1) - (n >>> 1);
2317 <                        }
2318 <                        return;
2319 <                    }
2443 >                int sc;
2444 >                if (finishing) {
2445 >                    nextTable = null;
2446 >                    table = nextTab;
2447 >                    sizeCtl = (n << 1) - (n >>> 1);
2448 >                    return;
2449                  }
2450 <            }
2451 <            else if ((f = tabAt(tab, i)) == null) {
2452 <                if (casTabAt(tab, i, null, fwd)) {
2453 <                    setTabAt(nextTab, i, null);
2454 <                    setTabAt(nextTab, i + n, null);
2326 <                    advance = true;
2450 >                if (U.compareAndSetInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
2451 >                    if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
2452 >                        return;
2453 >                    finishing = advance = true;
2454 >                    i = n; // recheck before commit
2455                  }
2456              }
2457 +            else if ((f = tabAt(tab, i)) == null)
2458 +                advance = casTabAt(tab, i, null, fwd);
2459              else if ((fh = f.hash) == MOVED)
2460                  advance = true; // already processed
2461              else {
# Line 2357 | Line 2487 | public class ConcurrentHashMap<K,V> impl
2487                                  else
2488                                      hn = new Node<K,V>(ph, pk, pv, hn);
2489                              }
2490 +                            setTabAt(nextTab, i, ln);
2491 +                            setTabAt(nextTab, i + n, hn);
2492 +                            setTabAt(tab, i, fwd);
2493 +                            advance = true;
2494                          }
2495                          else if (f instanceof TreeBin) {
2496                              TreeBin<K,V> t = (TreeBin<K,V>)f;
# Line 2388 | Line 2522 | public class ConcurrentHashMap<K,V> impl
2522                                  (hc != 0) ? new TreeBin<K,V>(lo) : t;
2523                              hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
2524                                  (lc != 0) ? new TreeBin<K,V>(hi) : t;
2525 +                            setTabAt(nextTab, i, ln);
2526 +                            setTabAt(nextTab, i + n, hn);
2527 +                            setTabAt(tab, i, fwd);
2528 +                            advance = true;
2529                          }
2392                        else
2393                            ln = hn = null;
2394                        setTabAt(nextTab, i, ln);
2395                        setTabAt(nextTab, i + n, hn);
2396                        setTabAt(tab, i, fwd);
2397                        advance = true;
2530                      }
2531                  }
2532              }
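
The loop above rebuilds each bin as a "low" list kept at index i and a "high" list moved to index i + n; because table lengths are powers of two, a single hash bit decides which list a node joins. A toy illustration of that rule, using plain ints in place of the spread node hashes:

    public class BinSplitDemo {
        public static void main(String[] args) {
            int n = 16;                        // old table length (a power of two)
            int[] hashes = {5, 21, 37, 53};    // all land in bin 5 of the old table
            for (int h : hashes) {
                int oldIndex = h & (n - 1);
                // The bit tested during transfer: 0 keeps the node in the "low"
                // list at the same index, nonzero sends it to the "high" list.
                int newIndex = (h & n) == 0 ? oldIndex : oldIndex + n;
                System.out.println("hash " + h + ": bin " + oldIndex
                                   + " -> bin " + newIndex + " in the doubled table");
            }
        }
    }
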
# Line 2407 | Line 2539 | public class ConcurrentHashMap<K,V> impl
2539       * A padded cell for distributing counts.  Adapted from LongAdder
2540       * and Striped64.  See their internal docs for explanation.
2541       */
2542 <    @sun.misc.Contended static final class CounterCell {
2542 >    @jdk.internal.vm.annotation.Contended static final class CounterCell {
2543          volatile long value;
2544          CounterCell(long x) { value = x; }
2545      }
2546  
2547      final long sumCount() {
2548 <        CounterCell[] as = counterCells; CounterCell a;
2548 >        CounterCell[] cs = counterCells;
2549          long sum = baseCount;
2550 <        if (as != null) {
2551 <            for (int i = 0; i < as.length; ++i) {
2552 <                if ((a = as[i]) != null)
2553 <                    sum += a.value;
2422 <            }
2550 >        if (cs != null) {
2551 >            for (CounterCell c : cs)
2552 >                if (c != null)
2553 >                    sum += c.value;
2554          }
2555          return sum;
2556      }
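
The baseCount-plus-cells layout that sumCount() folds together is the striping idea the comment credits to LongAdder and Striped64; LongAdder exposes it directly, so a short usage sketch of that public class conveys the semantics (the thread and iteration counts are arbitrary):

    import java.util.concurrent.atomic.LongAdder;
    import java.util.stream.IntStream;

    public class StripedCountDemo {
        public static void main(String[] args) {
            LongAdder counter = new LongAdder();
            // Concurrent increments land on per-thread cells instead of all
            // contending on one CAS target, much as CounterCells do.
            IntStream.range(0, 8).parallel().forEach(worker -> {
                for (int i = 0; i < 100_000; i++)
                    counter.increment();
            });
            // sum() folds the base plus every cell, like sumCount(); the total
            // is exact here only because the updates have already completed.
            System.out.println(counter.sum()); // 800000
        }
    }
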
# Line 2434 | Line 2565 | public class ConcurrentHashMap<K,V> impl
2565          }
2566          boolean collide = false;                // True if last slot nonempty
2567          for (;;) {
2568 <            CounterCell[] as; CounterCell a; int n; long v;
2569 <            if ((as = counterCells) != null && (n = as.length) > 0) {
2570 <                if ((a = as[(n - 1) & h]) == null) {
2568 >            CounterCell[] cs; CounterCell c; int n; long v;
2569 >            if ((cs = counterCells) != null && (n = cs.length) > 0) {
2570 >                if ((c = cs[(n - 1) & h]) == null) {
2571                      if (cellsBusy == 0) {            // Try to attach new Cell
2572                          CounterCell r = new CounterCell(x); // Optimistic create
2573                          if (cellsBusy == 0 &&
2574 <                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2574 >                            U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2575                              boolean created = false;
2576                              try {               // Recheck under lock
2577                                  CounterCell[] rs; int m, j;
# Line 2462 | Line 2593 | public class ConcurrentHashMap<K,V> impl
2593                  }
2594                  else if (!wasUncontended)       // CAS already known to fail
2595                      wasUncontended = true;      // Continue after rehash
2596 <                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2596 >                else if (U.compareAndSetLong(c, CELLVALUE, v = c.value, v + x))
2597                      break;
2598 <                else if (counterCells != as || n >= NCPU)
2598 >                else if (counterCells != cs || n >= NCPU)
2599                      collide = false;            // At max size or stale
2600                  else if (!collide)
2601                      collide = true;
2602                  else if (cellsBusy == 0 &&
2603 <                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2603 >                         U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2604                      try {
2605 <                        if (counterCells == as) {// Expand table unless stale
2606 <                            CounterCell[] rs = new CounterCell[n << 1];
2476 <                            for (int i = 0; i < n; ++i)
2477 <                                rs[i] = as[i];
2478 <                            counterCells = rs;
2479 <                        }
2605 >                        if (counterCells == cs) // Expand table unless stale
2606 >                            counterCells = Arrays.copyOf(cs, n << 1);
2607                      } finally {
2608                          cellsBusy = 0;
2609                      }
# Line 2485 | Line 2612 | public class ConcurrentHashMap<K,V> impl
2612                  }
2613                  h = ThreadLocalRandom.advanceProbe(h);
2614              }
2615 <            else if (cellsBusy == 0 && counterCells == as &&
2616 <                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2615 >            else if (cellsBusy == 0 && counterCells == cs &&
2616 >                     U.compareAndSetInt(this, CELLSBUSY, 0, 1)) {
2617                  boolean init = false;
2618                  try {                           // Initialize table
2619 <                    if (counterCells == as) {
2619 >                    if (counterCells == cs) {
2620                          CounterCell[] rs = new CounterCell[2];
2621                          rs[h & 1] = new CounterCell(x);
2622                          counterCells = rs;
# Line 2501 | Line 2628 | public class ConcurrentHashMap<K,V> impl
2628                  if (init)
2629                      break;
2630              }
2631 <            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2631 >            else if (U.compareAndSetLong(this, BASECOUNT, v = baseCount, v + x))
2632                  break;                          // Fall back on using base
2633          }
2634      }
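
Because the count lives in baseCount plus whatever cells fullAddCount has spun up, and sumCount() reads them without locking, aggregate queries are snapshots rather than precise figures while writers are active. The long-valued mappingCount() is the aggregate intended for that purpose; a brief usage sketch:

    import java.util.concurrent.ConcurrentHashMap;

    public class MappingCountDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            // mappingCount() returns a long and, like size(), sums baseCount and
            // the counter cells; treat the result as an estimate if other threads
            // are updating the map at the same time.
            System.out.println(map.mappingCount()); // 2
        }
    }
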
# Line 2513 | Line 2640 | public class ConcurrentHashMap<K,V> impl
2640       * too small, in which case resizes instead.
2641       */
2642      private final void treeifyBin(Node<K,V>[] tab, int index) {
2643 <        Node<K,V> b; int n, sc;
2643 >        Node<K,V> b; int n;
2644          if (tab != null) {
2645 <            if ((n = tab.length) < MIN_TREEIFY_CAPACITY) {
2646 <                if (tab == table && (sc = sizeCtl) >= 0 &&
2647 <                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
2521 <                    transfer(tab, null);
2522 <            }
2523 <            else if ((b = tabAt(tab, index)) != null) {
2645 >            if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
2646 >                tryPresize(n << 1);
2647 >            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
2648                  synchronized (b) {
2649                      if (tabAt(tab, index) == b) {
2650                          TreeNode<K,V> hd = null, tl = null;
# Line 2542 | Line 2666 | public class ConcurrentHashMap<K,V> impl
2666      }
2667  
2668      /**
2669 <     * Returns a list on non-TreeNodes replacing those in given list
2669 >     * Returns a list of non-TreeNodes replacing those in given list.
2670       */
2671      static <K,V> Node<K,V> untreeify(Node<K,V> b) {
2672          Node<K,V> hd = null, tl = null;
2673          for (Node<K,V> q = b; q != null; q = q.next) {
2674 <            Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val, null);
2674 >            Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val);
2675              if (tl == null)
2676                  hd = p;
2677              else
# Line 2560 | Line 2684 | public class ConcurrentHashMap<K,V> impl
2684      /* ---------------- TreeNodes -------------- */
2685  
2686      /**
2687 <     * Nodes for use in TreeBins
2687 >     * Nodes for use in TreeBins.
2688       */
2689      static final class TreeNode<K,V> extends Node<K,V> {
2690          TreeNode<K,V> parent;  // red-black tree links
# Line 2586 | Line 2710 | public class ConcurrentHashMap<K,V> impl
2710          final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
2711              if (k != null) {
2712                  TreeNode<K,V> p = this;
2713 <                do  {
2713 >                do {
2714                      int ph, dir; K pk; TreeNode<K,V> q;
2715                      TreeNode<K,V> pl = p.left, pr = p.right;
2716                      if ((ph = p.hash) > h)
# Line 2595 | Line 2719 | public class ConcurrentHashMap<K,V> impl
2719                          p = pr;
2720                      else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
2721                          return p;
2722 <                    else if (pl == null && pr == null)
2723 <                        break;
2722 >                    else if (pl == null)
2723 >                        p = pr;
2724 >                    else if (pr == null)
2725 >                        p = pl;
2726                      else if ((kc != null ||
2727                                (kc = comparableClassFor(k)) != null) &&
2728                               (dir = compareComparables(kc, k, pk)) != 0)
2729                          p = (dir < 0) ? pl : pr;
2730 <                    else if (pl == null)
2605 <                        p = pr;
2606 <                    else if (pr == null ||
2607 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2608 <                        p = pl;
2609 <                    else
2730 >                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
2731                          return q;
2732 +                    else
2733 +                        p = pl;
2734                  } while (p != null);
2735              }
2736              return null;
# Line 2634 | Line 2757 | public class ConcurrentHashMap<K,V> impl
2757          static final int READER = 4; // increment value for setting read lock
2758  
2759          /**
2760 +         * Tie-breaking utility for ordering insertions when equal
2761 +         * hashCodes and non-comparable. We don't require a total
2762 +         * order, just a consistent insertion rule to maintain
2763 +         * equivalence across rebalancings. Tie-breaking further than
2764 +         * necessary simplifies testing a bit.
2765 +         */
2766 +        static int tieBreakOrder(Object a, Object b) {
2767 +            int d;
2768 +            if (a == null || b == null ||
2769 +                (d = a.getClass().getName().
2770 +                 compareTo(b.getClass().getName())) == 0)
2771 +                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
2772 +                     -1 : 1);
2773 +            return d;
2774 +        }
2775 +
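
tieBreakOrder only comes into play for keys that collide on hash and offer no usable Comparable ordering; from the caller's side such keys still behave normally. A small demonstration with a deliberately poor key type (the Key class is invented for the demo):

    import java.util.concurrent.ConcurrentHashMap;

    public class TieBreakDemo {
        // Worst-case key: constant hashCode and not Comparable, so every instance
        // collides and ordering inside a treeified bin falls back to the
        // class-name / identityHashCode tie-break.
        static final class Key {
            final int id;
            Key(int id) { this.id = id; }
            @Override public boolean equals(Object o) {
                return o instanceof Key && ((Key) o).id == id;
            }
            @Override public int hashCode() { return 42; }
        }

        public static void main(String[] args) {
            ConcurrentHashMap<Key, Integer> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 1_000; i++)
                map.put(new Key(i), i);
            System.out.println(map.get(new Key(777))); // 777
            System.out.println(map.size());            // 1000
        }
    }
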
2776 +        /**
2777           * Creates bin with initial set of nodes headed by b.
2778           */
2779          TreeBin(TreeNode<K,V> b) {
2780 <            super(TREEBIN, null, null, null);
2780 >            super(TREEBIN, null, null);
2781              this.first = b;
2782              TreeNode<K,V> r = null;
2783              for (TreeNode<K,V> x = b, next; x != null; x = next) {
# Line 2649 | Line 2789 | public class ConcurrentHashMap<K,V> impl
2789                      r = x;
2790                  }
2791                  else {
2792 <                    Object key = x.key;
2793 <                    int hash = x.hash;
2792 >                    K k = x.key;
2793 >                    int h = x.hash;
2794                      Class<?> kc = null;
2795                      for (TreeNode<K,V> p = r;;) {
2796                          int dir, ph;
2797 <                        if ((ph = p.hash) > hash)
2797 >                        K pk = p.key;
2798 >                        if ((ph = p.hash) > h)
2799                              dir = -1;
2800 <                        else if (ph < hash)
2800 >                        else if (ph < h)
2801                              dir = 1;
2802 <                        else if ((kc != null ||
2803 <                                  (kc = comparableClassFor(key)) != null))
2804 <                            dir = compareComparables(kc, key, p.key);
2805 <                        else
2665 <                            dir = 0;
2802 >                        else if ((kc == null &&
2803 >                                  (kc = comparableClassFor(k)) == null) ||
2804 >                                 (dir = compareComparables(kc, k, pk)) == 0)
2805 >                            dir = tieBreakOrder(k, pk);
2806                          TreeNode<K,V> xp = p;
2807                          if ((p = (dir <= 0) ? p.left : p.right) == null) {
2808                              x.parent = xp;
# Line 2677 | Line 2817 | public class ConcurrentHashMap<K,V> impl
2817                  }
2818              }
2819              this.root = r;
2820 +            assert checkInvariants(root);
2821          }
2822  
2823          /**
2824 <         * Acquires write lock for tree restructuring
2824 >         * Acquires write lock for tree restructuring.
2825           */
2826          private final void lockRoot() {
2827 <            if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
2827 >            if (!U.compareAndSetInt(this, LOCKSTATE, 0, WRITER))
2828                  contendedLock(); // offload to separate method
2829          }
2830  
2831          /**
2832 <         * Releases write lock for tree restructuring
2832 >         * Releases write lock for tree restructuring.
2833           */
2834          private final void unlockRoot() {
2835              lockState = 0;
2836          }
2837  
2838          /**
2839 <         * Possibly blocks awaiting root lock
2839 >         * Possibly blocks awaiting root lock.
2840           */
2841          private final void contendedLock() {
2842              boolean waiting = false;
2843              for (int s;;) {
2844 <                if (((s = lockState) & WRITER) == 0) {
2845 <                    if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
2844 >                if (((s = lockState) & ~WAITER) == 0) {
2845 >                    if (U.compareAndSetInt(this, LOCKSTATE, s, WRITER)) {
2846                          if (waiting)
2847                              waiter = null;
2848                          return;
2849                      }
2850                  }
2851 <                else if ((s | WAITER) == 0) {
2852 <                    if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
2851 >                else if ((s & WAITER) == 0) {
2852 >                    if (U.compareAndSetInt(this, LOCKSTATE, s, s | WAITER)) {
2853                          waiting = true;
2854                          waiter = Thread.currentThread();
2855                      }
# Line 2720 | Line 2861 | public class ConcurrentHashMap<K,V> impl
2861  
2862          /**
2863           * Returns matching node or null if none. Tries to search
2864 <         * using tree compareisons from root, but continues linear
2864 >         * using tree comparisons from root, but continues linear
2865           * search when lock not available.
2866           */
2867          final Node<K,V> find(int h, Object k) {
2868              if (k != null) {
2869 <                for (Node<K,V> e = first; e != null; e = e.next) {
2869 >                for (Node<K,V> e = first; e != null; ) {
2870                      int s; K ek;
2871                      if (((s = lockState) & (WAITER|WRITER)) != 0) {
2872                          if (e.hash == h &&
2873                              ((ek = e.key) == k || (ek != null && k.equals(ek))))
2874                              return e;
2875 +                        e = e.next;
2876                      }
2877 <                    else if (U.compareAndSwapInt(this, LOCKSTATE, s,
2877 >                    else if (U.compareAndSetInt(this, LOCKSTATE, s,
2878                                                   s + READER)) {
2879                          TreeNode<K,V> r, p;
2880                          try {
# Line 2757 | Line 2899 | public class ConcurrentHashMap<K,V> impl
2899           */
2900          final TreeNode<K,V> putTreeVal(int h, K k, V v) {
2901              Class<?> kc = null;
2902 +            boolean searched = false;
2903              for (TreeNode<K,V> p = root;;) {
2904 <                int dir, ph; K pk; TreeNode<K,V> q, pr;
2904 >                int dir, ph; K pk;
2905                  if (p == null) {
2906                      first = root = new TreeNode<K,V>(h, k, v, null, null);
2907                      break;
# Line 2772 | Line 2915 | public class ConcurrentHashMap<K,V> impl
2915                  else if ((kc == null &&
2916                            (kc = comparableClassFor(k)) == null) ||
2917                           (dir = compareComparables(kc, k, pk)) == 0) {
2918 <                    if (p.left == null)
2919 <                        dir = 1;
2920 <                    else if ((pr = p.right) == null ||
2921 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2922 <                        dir = -1;
2923 <                    else
2924 <                        return q;
2918 >                    if (!searched) {
2919 >                        TreeNode<K,V> q, ch;
2920 >                        searched = true;
2921 >                        if (((ch = p.left) != null &&
2922 >                             (q = ch.findTreeNode(h, k, kc)) != null) ||
2923 >                            ((ch = p.right) != null &&
2924 >                             (q = ch.findTreeNode(h, k, kc)) != null))
2925 >                            return q;
2926 >                    }
2927 >                    dir = tieBreakOrder(k, pk);
2928                  }
2929 +
2930                  TreeNode<K,V> xp = p;
2931 <                if ((p = (dir < 0) ? p.left : p.right) == null) {
2931 >                if ((p = (dir <= 0) ? p.left : p.right) == null) {
2932                      TreeNode<K,V> x, f = first;
2933                      first = x = new TreeNode<K,V>(h, k, v, f, xp);
2934                      if (f != null)
2935                          f.prev = x;
2936 <                    if (dir < 0)
2936 >                    if (dir <= 0)
2937                          xp.left = x;
2938                      else
2939                          xp.right = x;
# Line 2815 | Line 2962 | public class ConcurrentHashMap<K,V> impl
2962           * that are accessible independently of lock. So instead we
2963           * swap the tree linkages.
2964           *
2965 <         * @return true if now too small so should be untreeified.
2965 >         * @return true if now too small, so should be untreeified
2966           */
2967          final boolean removeTreeNode(TreeNode<K,V> p) {
2968              TreeNode<K,V> next = (TreeNode<K,V>)p.next;
# Line 3009 | Line 3156 | public class ConcurrentHashMap<K,V> impl
3156  
3157          static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root,
3158                                                     TreeNode<K,V> x) {
3159 <            for (TreeNode<K,V> xp, xpl, xpr;;)  {
3159 >            for (TreeNode<K,V> xp, xpl, xpr;;) {
3160                  if (x == null || x == root)
3161                      return root;
3162                  else if ((xp = x.parent) == null) {
# Line 3100 | Line 3247 | public class ConcurrentHashMap<K,V> impl
3247          }
3248  
3249          /**
3250 <         * Recursive invariant check
3250 >         * Checks invariants recursively for the tree of Nodes rooted at t.
3251           */
3252          static <K,V> boolean checkInvariants(TreeNode<K,V> t) {
3253              TreeNode<K,V> tp = t.parent, tl = t.left, tr = t.right,
# Line 3124 | Line 3271 | public class ConcurrentHashMap<K,V> impl
3271              return true;
3272          }
3273  
3274 <        private static final sun.misc.Unsafe U;
3274 >        private static final Unsafe U = Unsafe.getUnsafe();
3275          private static final long LOCKSTATE;
3276          static {
3277              try {
3131                U = sun.misc.Unsafe.getUnsafe();
3132                Class<?> k = TreeBin.class;
3278                  LOCKSTATE = U.objectFieldOffset
3279 <                    (k.getDeclaredField("lockState"));
3280 <            } catch (Exception e) {
3281 <                throw new Error(e);
3279 >                    (TreeBin.class.getDeclaredField("lockState"));
3280 >            } catch (ReflectiveOperationException e) {
3281 >                throw new ExceptionInInitializerError(e);
3282              }
3283          }
3284      }
# Line 3141 | Line 3286 | public class ConcurrentHashMap<K,V> impl
3286      /* ----------------Table Traversal -------------- */
3287  
3288      /**
3289 +     * Records the table, its length, and current traversal index for a
3290 +     * traverser that must process a region of a forwarded table before
3291 +     * proceeding with current table.
3292 +     */
3293 +    static final class TableStack<K,V> {
3294 +        int length;
3295 +        int index;
3296 +        Node<K,V>[] tab;
3297 +        TableStack<K,V> next;
3298 +    }
3299 +
3300 +    /**
3301       * Encapsulates traversal for methods such as containsValue; also
3302       * serves as a base class for other iterators and spliterators.
3303       *
# Line 3164 | Line 3321 | public class ConcurrentHashMap<K,V> impl
3321      static class Traverser<K,V> {
3322          Node<K,V>[] tab;        // current table; updated if resized
3323          Node<K,V> next;         // the next entry to use
3324 +        TableStack<K,V> stack, spare; // to save/restore on ForwardingNodes
3325          int index;              // index of bin to use next
3326          int baseIndex;          // current index of initial table
3327          int baseLimit;          // index bound for initial table
# Line 3185 | Line 3343 | public class ConcurrentHashMap<K,V> impl
3343              if ((e = next) != null)
3344                  e = e.next;
3345              for (;;) {
3346 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
3346 >                Node<K,V>[] t; int i, n;  // must use locals in checks
3347                  if (e != null)
3348                      return next = e;
3349                  if (baseIndex >= baseLimit || (t = tab) == null ||
3350                      (n = t.length) <= (i = index) || i < 0)
3351                      return next = null;
3352 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
3352 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
3353                      if (e instanceof ForwardingNode) {
3354                          tab = ((ForwardingNode<K,V>)e).nextTable;
3355                          e = null;
3356 +                        pushState(t, i, n);
3357                          continue;
3358                      }
3359                      else if (e instanceof TreeBin)
# Line 3202 | Line 3361 | public class ConcurrentHashMap<K,V> impl
3361                      else
3362                          e = null;
3363                  }
3364 <                if ((index += baseSize) >= n)
3365 <                    index = ++baseIndex;    // visit upper slots if present
3364 >                if (stack != null)
3365 >                    recoverState(n);
3366 >                else if ((index = i + baseSize) >= n)
3367 >                    index = ++baseIndex; // visit upper slots if present
3368 >            }
3369 >        }
3370 >
3371 >        /**
3372 >         * Saves traversal state upon encountering a forwarding node.
3373 >         */
3374 >        private void pushState(Node<K,V>[] t, int i, int n) {
3375 >            TableStack<K,V> s = spare;  // reuse if possible
3376 >            if (s != null)
3377 >                spare = s.next;
3378 >            else
3379 >                s = new TableStack<K,V>();
3380 >            s.tab = t;
3381 >            s.length = n;
3382 >            s.index = i;
3383 >            s.next = stack;
3384 >            stack = s;
3385 >        }
3386 >
3387 >        /**
3388 >         * Possibly pops traversal state.
3389 >         *
3390 >         * @param n length of current table
3391 >         */
3392 >        private void recoverState(int n) {
3393 >            TableStack<K,V> s; int len;
3394 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
3395 >                n = len;
3396 >                index = s.index;
3397 >                tab = s.tab;
3398 >                s.tab = null;
3399 >                TableStack<K,V> next = s.next;
3400 >                s.next = spare; // save for reuse
3401 >                stack = next;
3402 >                spare = s;
3403              }
3404 +            if (s == null && (index += baseSize) >= n)
3405 +                index = ++baseIndex;
3406          }
3407      }
3408  
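
The TableStack bookkeeping above is what lets a traversal keep going while a resize forwards bins out from under it, which is why the iterators and spliterators built on Traverser are weakly consistent rather than fail-fast. A small single-threaded illustration (key and value choices are arbitrary):

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class WeaklyConsistentIterationDemo {
        public static void main(String[] args) {
            Map<Integer, String> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 4; i++)
                map.put(i, "v" + i);

            Iterator<Integer> it = map.keySet().iterator();
            while (it.hasNext()) {
                Integer k = it.next();
                if (k == 0)                       // mutate mid-iteration: no exception is thrown,
                    map.put(999, "added later");  // and the new entry may or may not be seen by 'it'
            }
            System.out.println(map.containsKey(999)); // true
        }
    }
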
3409      /**
3410       * Base of key, value, and entry Iterators. Adds fields to
3411 <     * Traverser to support iterator.remove
3411 >     * Traverser to support iterator.remove.
3412       */
3413      static class BaseIterator<K,V> extends Traverser<K,V> {
3414          final ConcurrentHashMap<K,V> map;
# Line 3236 | Line 3434 | public class ConcurrentHashMap<K,V> impl
3434  
3435      static final class KeyIterator<K,V> extends BaseIterator<K,V>
3436          implements Iterator<K>, Enumeration<K> {
3437 <        KeyIterator(Node<K,V>[] tab, int index, int size, int limit,
3437 >        KeyIterator(Node<K,V>[] tab, int size, int index, int limit,
3438                      ConcurrentHashMap<K,V> map) {
3439 <            super(tab, index, size, limit, map);
3439 >            super(tab, size, index, limit, map);
3440          }
3441  
3442          public final K next() {
# Line 3256 | Line 3454 | public class ConcurrentHashMap<K,V> impl
3454  
3455      static final class ValueIterator<K,V> extends BaseIterator<K,V>
3456          implements Iterator<V>, Enumeration<V> {
3457 <        ValueIterator(Node<K,V>[] tab, int index, int size, int limit,
3457 >        ValueIterator(Node<K,V>[] tab, int size, int index, int limit,
3458                        ConcurrentHashMap<K,V> map) {
3459 <            super(tab, index, size, limit, map);
3459 >            super(tab, size, index, limit, map);
3460          }
3461  
3462          public final V next() {
# Line 3276 | Line 3474 | public class ConcurrentHashMap<K,V> impl
3474  
3475      static final class EntryIterator<K,V> extends BaseIterator<K,V>
3476          implements Iterator<Map.Entry<K,V>> {
3477 <        EntryIterator(Node<K,V>[] tab, int index, int size, int limit,
3477 >        EntryIterator(Node<K,V>[] tab, int size, int index, int limit,
3478                        ConcurrentHashMap<K,V> map) {
3479 <            super(tab, index, size, limit, map);
3479 >            super(tab, size, index, limit, map);
3480          }
3481  
3482          public final Map.Entry<K,V> next() {
# Line 3294 | Line 3492 | public class ConcurrentHashMap<K,V> impl
3492      }
3493  
3494      /**
3495 <     * Exported Entry for EntryIterator
3495 >     * Exported Entry for EntryIterator.
3496       */
3497      static final class MapEntry<K,V> implements Map.Entry<K,V> {
3498          final K key; // non-null
# Line 3308 | Line 3506 | public class ConcurrentHashMap<K,V> impl
3506          public K getKey()        { return key; }
3507          public V getValue()      { return val; }
3508          public int hashCode()    { return key.hashCode() ^ val.hashCode(); }
3509 <        public String toString() { return key + "=" + val; }
3509 >        public String toString() {
3510 >            return Helpers.mapEntryToString(key, val);
3511 >        }
3512  
3513          public boolean equals(Object o) {
3514              Object k, v; Map.Entry<?,?> e;
# Line 3345 | Line 3545 | public class ConcurrentHashMap<K,V> impl
3545              this.est = est;
3546          }
3547  
3548 <        public Spliterator<K> trySplit() {
3548 >        public KeySpliterator<K,V> trySplit() {
3549              int i, f, h;
3550              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3551                  new KeySpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3384 | Line 3584 | public class ConcurrentHashMap<K,V> impl
3584              this.est = est;
3585          }
3586  
3587 <        public Spliterator<V> trySplit() {
3587 >        public ValueSpliterator<K,V> trySplit() {
3588              int i, f, h;
3589              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3590                  new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3424 | Line 3624 | public class ConcurrentHashMap<K,V> impl
3624              this.est = est;
3625          }
3626  
3627 <        public Spliterator<Map.Entry<K,V>> trySplit() {
3627 >        public EntrySpliterator<K,V> trySplit() {
3628              int i, f, h;
3629              return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
3630                  new EntrySpliterator<K,V>(tab, baseSize, baseLimit = h,
# Line 3498 | Line 3698 | public class ConcurrentHashMap<K,V> impl
3698       * for an element, or null if there is no transformation (in
3699       * which case the action is not applied)
3700       * @param action the action
3701 +     * @param <U> the return type of the transformer
3702       * @since 1.8
3703       */
3704      public <U> void forEach(long parallelismThreshold,
# Line 3521 | Line 3722 | public class ConcurrentHashMap<K,V> impl
3722       * needed for this operation to be executed in parallel
3723       * @param searchFunction a function returning a non-null
3724       * result on success, else null
3725 +     * @param <U> the return type of the search function
3726       * @return a non-null result from applying the given search
3727       * function on each (key, value), or null if none
3728       * @since 1.8
# Line 3544 | Line 3746 | public class ConcurrentHashMap<K,V> impl
3746       * for an element, or null if there is no transformation (in
3747       * which case it is not combined)
3748       * @param reducer a commutative associative combining function
3749 +     * @param <U> the return type of the transformer
3750       * @return the result of accumulating the given transformation
3751       * of all (key, value) pairs
3752       * @since 1.8
# Line 3573 | Line 3776 | public class ConcurrentHashMap<K,V> impl
3776       * of all (key, value) pairs
3777       * @since 1.8
3778       */
3779 <    public double reduceToDoubleIn(long parallelismThreshold,
3780 <                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3781 <                                   double basis,
3782 <                                   DoubleBinaryOperator reducer) {
3779 >    public double reduceToDouble(long parallelismThreshold,
3780 >                                 ToDoubleBiFunction<? super K, ? super V> transformer,
3781 >                                 double basis,
3782 >                                 DoubleBinaryOperator reducer) {
3783          if (transformer == null || reducer == null)
3784              throw new NullPointerException();
3785          return new MapReduceMappingsToDoubleTask<K,V>
# Line 3662 | Line 3865 | public class ConcurrentHashMap<K,V> impl
3865       * for an element, or null if there is no transformation (in
3866       * which case the action is not applied)
3867       * @param action the action
3868 +     * @param <U> the return type of the transformer
3869       * @since 1.8
3870       */
3871      public <U> void forEachKey(long parallelismThreshold,
# Line 3685 | Line 3889 | public class ConcurrentHashMap<K,V> impl
3889       * needed for this operation to be executed in parallel
3890       * @param searchFunction a function returning a non-null
3891       * result on success, else null
3892 +     * @param <U> the return type of the search function
3893       * @return a non-null result from applying the given search
3894       * function on each key, or null if none
3895       * @since 1.8
# Line 3727 | Line 3932 | public class ConcurrentHashMap<K,V> impl
3932       * for an element, or null if there is no transformation (in
3933       * which case it is not combined)
3934       * @param reducer a commutative associative combining function
3935 +     * @param <U> the return type of the transformer
3936       * @return the result of accumulating the given transformation
3937       * of all keys
3938       * @since 1.8
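
A brief usage sketch of this family of bulk methods; a parallelismThreshold of 1 asks for as much parallelism as possible, while Long.MAX_VALUE keeps the operation on the calling thread, and the map contents below are arbitrary:

    import java.util.concurrent.ConcurrentHashMap;

    public class BulkOpsDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> scores = new ConcurrentHashMap<>();
            scores.put("alice", 30);
            scores.put("bob", 25);
            scores.put("carol", 41);

            // forEach: run an action over every (key, value) pair.
            scores.forEach(1L, (k, v) -> System.out.println(k + " -> " + v));

            // search: returns some non-null result and skips further processing.
            String over40 = scores.search(1L, (k, v) -> v > 40 ? k : null);
            System.out.println("over 40: " + over40); // carol

            // reduceValuesToLong: transform each value and fold it into a basis.
            long total = scores.reduceValuesToLong(1L, Integer::longValue, 0L, Long::sum);
            System.out.println("total: " + total);    // 96
        }
    }
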
# Line 3846 | Line 4052 | public class ConcurrentHashMap<K,V> impl
4052       * for an element, or null if there is no transformation (in
4053       * which case the action is not applied)
4054       * @param action the action
4055 +     * @param <U> the return type of the transformer
4056       * @since 1.8
4057       */
4058      public <U> void forEachValue(long parallelismThreshold,
# Line 3869 | Line 4076 | public class ConcurrentHashMap<K,V> impl
4076       * needed for this operation to be executed in parallel
4077       * @param searchFunction a function returning a non-null
4078       * result on success, else null
4079 +     * @param <U> the return type of the search function
4080       * @return a non-null result from applying the given search
4081       * function on each value, or null if none
4082       * @since 1.8
# Line 3910 | Line 4118 | public class ConcurrentHashMap<K,V> impl
4118       * for an element, or null if there is no transformation (in
4119       * which case it is not combined)
4120       * @param reducer a commutative associative combining function
4121 +     * @param <U> the return type of the transformer
4122       * @return the result of accumulating the given transformation
4123       * of all values
4124       * @since 1.8
# Line 4027 | Line 4236 | public class ConcurrentHashMap<K,V> impl
4236       * for an element, or null if there is no transformation (in
4237       * which case the action is not applied)
4238       * @param action the action
4239 +     * @param <U> the return type of the transformer
4240       * @since 1.8
4241       */
4242      public <U> void forEachEntry(long parallelismThreshold,
# Line 4050 | Line 4260 | public class ConcurrentHashMap<K,V> impl
4260       * needed for this operation to be executed in parallel
4261       * @param searchFunction a function returning a non-null
4262       * result on success, else null
4263 +     * @param <U> the return type of the search function
4264       * @return a non-null result from applying the given search
4265       * function on each entry, or null if none
4266       * @since 1.8
# Line 4091 | Line 4302 | public class ConcurrentHashMap<K,V> impl
4302       * for an element, or null if there is no transformation (in
4303       * which case it is not combined)
4304       * @param reducer a commutative associative combining function
4305 +     * @param <U> the return type of the transformer
4306       * @return the result of accumulating the given transformation
4307       * of all entries
4308       * @since 1.8
# Line 4213 | Line 4425 | public class ConcurrentHashMap<K,V> impl
4425          // implementations below rely on concrete classes supplying these
4426          // abstract methods
4427          /**
4428 <         * Returns a "weakly consistent" iterator that will never
4429 <         * throw {@link ConcurrentModificationException}, and
4430 <         * guarantees to traverse elements as they existed upon
4431 <         * construction of the iterator, and may (but is not
4432 <         * guaranteed to) reflect any modifications subsequent to
4433 <         * construction.
4428 >         * Returns an iterator over the elements in this collection.
4429 >         *
4430 >         * <p>The returned iterator is
4431 >         * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
4432 >         *
4433 >         * @return an iterator over the elements in this collection
4434           */
4435          public abstract Iterator<E> iterator();
4436          public abstract boolean contains(Object o);
4437          public abstract boolean remove(Object o);
4438  
4439 <        private static final String oomeMsg = "Required array size too large";
4439 >        private static final String OOME_MSG = "Required array size too large";
4440  
4441          public final Object[] toArray() {
4442              long sz = map.mappingCount();
4443              if (sz > MAX_ARRAY_SIZE)
4444 <                throw new OutOfMemoryError(oomeMsg);
4444 >                throw new OutOfMemoryError(OOME_MSG);
4445              int n = (int)sz;
4446              Object[] r = new Object[n];
4447              int i = 0;
4448              for (E e : this) {
4449                  if (i == n) {
4450                      if (n >= MAX_ARRAY_SIZE)
4451 <                        throw new OutOfMemoryError(oomeMsg);
4451 >                        throw new OutOfMemoryError(OOME_MSG);
4452                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4453                          n = MAX_ARRAY_SIZE;
4454                      else
# Line 4252 | Line 4464 | public class ConcurrentHashMap<K,V> impl
4464          public final <T> T[] toArray(T[] a) {
4465              long sz = map.mappingCount();
4466              if (sz > MAX_ARRAY_SIZE)
4467 <                throw new OutOfMemoryError(oomeMsg);
4467 >                throw new OutOfMemoryError(OOME_MSG);
4468              int m = (int)sz;
4469              T[] r = (a.length >= m) ? a :
4470                  (T[])java.lang.reflect.Array
# Line 4262 | Line 4474 | public class ConcurrentHashMap<K,V> impl
4474              for (E e : this) {
4475                  if (i == n) {
4476                      if (n >= MAX_ARRAY_SIZE)
4477 <                        throw new OutOfMemoryError(oomeMsg);
4477 >                        throw new OutOfMemoryError(OOME_MSG);
4478                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4479                          n = MAX_ARRAY_SIZE;
4480                      else
# Line 4315 | Line 4527 | public class ConcurrentHashMap<K,V> impl
4527              return true;
4528          }
4529  
4530 <        public final boolean removeAll(Collection<?> c) {
4530 >        public boolean removeAll(Collection<?> c) {
4531 >            if (c == null) throw new NullPointerException();
4532              boolean modified = false;
4533 <            for (Iterator<E> it = iterator(); it.hasNext();) {
4534 <                if (c.contains(it.next())) {
4535 <                    it.remove();
4536 <                    modified = true;
4533 >            // Use (c instanceof Set) as a hint that lookup in c is as
4534 >            // efficient as this view
4535 >            Node<K,V>[] t;
4536 >            if ((t = map.table) == null) {
4537 >                return false;
4538 >            } else if (c instanceof Set<?> && c.size() > t.length) {
4539 >                for (Iterator<?> it = iterator(); it.hasNext(); ) {
4540 >                    if (c.contains(it.next())) {
4541 >                        it.remove();
4542 >                        modified = true;
4543 >                    }
4544                  }
4545 +            } else {
4546 +                for (Object e : c)
4547 +                    modified |= remove(e);
4548              }
4549              return modified;
4550          }
4551  
4552          public final boolean retainAll(Collection<?> c) {
4553 +            if (c == null) throw new NullPointerException();
4554              boolean modified = false;
4555              for (Iterator<E> it = iterator(); it.hasNext();) {
4556                  if (!c.contains(it.next())) {
# Line 4507 | Line 4731 | public class ConcurrentHashMap<K,V> impl
4731              throw new UnsupportedOperationException();
4732          }
4733  
4734 +        @Override public boolean removeAll(Collection<?> c) {
4735 +            if (c == null) throw new NullPointerException();
4736 +            boolean modified = false;
4737 +            for (Iterator<V> it = iterator(); it.hasNext();) {
4738 +                if (c.contains(it.next())) {
4739 +                    it.remove();
4740 +                    modified = true;
4741 +                }
4742 +            }
4743 +            return modified;
4744 +        }
4745 +
4746 +        public boolean removeIf(Predicate<? super V> filter) {
4747 +            return map.removeValueIf(filter);
4748 +        }
4749 +
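
These overrides funnel the java.util.Collection removeIf contract into a single scan of the backing map; from user code it is just the usual view idiom, as in this short sketch (contents are arbitrary):

    import java.util.concurrent.ConcurrentHashMap;

    public class ViewRemoveIfDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            map.put("c", 3);

            // Removing through a view removes from the map itself.
            map.values().removeIf(v -> v % 2 == 0);                 // drops "b"
            map.entrySet().removeIf(e -> e.getKey().equals("c"));   // drops "c"
            System.out.println(map);                                // {a=1}
        }
    }
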
4750          public Spliterator<V> spliterator() {
4751              Node<K,V>[] t;
4752              ConcurrentHashMap<K,V> m = map;
# Line 4576 | Line 4816 | public class ConcurrentHashMap<K,V> impl
4816              return added;
4817          }
4818  
4819 +        public boolean removeIf(Predicate<? super Entry<K,V>> filter) {
4820 +            return map.removeEntryIf(filter);
4821 +        }
4822 +
4823          public final int hashCode() {
4824              int h = 0;
4825              Node<K,V>[] t;
# Line 4621 | Line 4865 | public class ConcurrentHashMap<K,V> impl
4865       * Base class for bulk tasks. Repeats some fields and code from
4866       * class Traverser, because we need to subclass CountedCompleter.
4867       */
4868 +    @SuppressWarnings("serial")
4869      abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
4870          Node<K,V>[] tab;        // same as Traverser
4871          Node<K,V> next;
4872 +        TableStack<K,V> stack, spare;
4873          int index;
4874          int baseIndex;
4875          int baseLimit;
# Line 4645 | Line 4891 | public class ConcurrentHashMap<K,V> impl
4891          }
4892  
4893          /**
4894 <         * Same as Traverser version
4894 >         * Same as Traverser version.
4895           */
4896          final Node<K,V> advance() {
4897              Node<K,V> e;
4898              if ((e = next) != null)
4899                  e = e.next;
4900              for (;;) {
4901 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
4901 >                Node<K,V>[] t; int i, n;
4902                  if (e != null)
4903                      return next = e;
4904                  if (baseIndex >= baseLimit || (t = tab) == null ||
4905                      (n = t.length) <= (i = index) || i < 0)
4906                      return next = null;
4907 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4907 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
4908                      if (e instanceof ForwardingNode) {
4909                          tab = ((ForwardingNode<K,V>)e).nextTable;
4910                          e = null;
4911 +                        pushState(t, i, n);
4912                          continue;
4913                      }
4914                      else if (e instanceof TreeBin)
# Line 4669 | Line 4916 | public class ConcurrentHashMap<K,V> impl
4916                      else
4917                          e = null;
4918                  }
4919 <                if ((index += baseSize) >= n)
4920 <                    index = ++baseIndex;    // visit upper slots if present
4919 >                if (stack != null)
4920 >                    recoverState(n);
4921 >                else if ((index = i + baseSize) >= n)
4922 >                    index = ++baseIndex;
4923              }
4924          }
4925 +
4926 +        private void pushState(Node<K,V>[] t, int i, int n) {
4927 +            TableStack<K,V> s = spare;
4928 +            if (s != null)
4929 +                spare = s.next;
4930 +            else
4931 +                s = new TableStack<K,V>();
4932 +            s.tab = t;
4933 +            s.length = n;
4934 +            s.index = i;
4935 +            s.next = stack;
4936 +            stack = s;
4937 +        }
4938 +
4939 +        private void recoverState(int n) {
4940 +            TableStack<K,V> s; int len;
4941 +            while ((s = stack) != null && (index += (len = s.length)) >= n) {
4942 +                n = len;
4943 +                index = s.index;
4944 +                tab = s.tab;
4945 +                s.tab = null;
4946 +                TableStack<K,V> next = s.next;
4947 +                s.next = spare; // save for reuse
4948 +                stack = next;
4949 +                spare = s;
4950 +            }
4951 +            if (s == null && (index += baseSize) >= n)
4952 +                index = ++baseIndex;
4953 +        }
4954      }
4955  
4956      /*
# Line 5131 | Line 5409 | public class ConcurrentHashMap<K,V> impl
5409                  result = r;
5410                  CountedCompleter<?> c;
5411                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5412 <                    @SuppressWarnings("unchecked") ReduceKeysTask<K,V>
5412 >                    @SuppressWarnings("unchecked")
5413 >                    ReduceKeysTask<K,V>
5414                          t = (ReduceKeysTask<K,V>)c,
5415                          s = t.rights;
5416                      while (s != null) {
# Line 5178 | Line 5457 | public class ConcurrentHashMap<K,V> impl
5457                  result = r;
5458                  CountedCompleter<?> c;
5459                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5460 <                    @SuppressWarnings("unchecked") ReduceValuesTask<K,V>
5460 >                    @SuppressWarnings("unchecked")
5461 >                    ReduceValuesTask<K,V>
5462                          t = (ReduceValuesTask<K,V>)c,
5463                          s = t.rights;
5464                      while (s != null) {
# Line 5223 | Line 5503 | public class ConcurrentHashMap<K,V> impl
5503                  result = r;
5504                  CountedCompleter<?> c;
5505                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5506 <                    @SuppressWarnings("unchecked") ReduceEntriesTask<K,V>
5506 >                    @SuppressWarnings("unchecked")
5507 >                    ReduceEntriesTask<K,V>
5508                          t = (ReduceEntriesTask<K,V>)c,
5509                          s = t.rights;
5510                      while (s != null) {
# Line 5276 | Line 5557 | public class ConcurrentHashMap<K,V> impl
5557                  result = r;
5558                  CountedCompleter<?> c;
5559                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5560 <                    @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U>
5560 >                    @SuppressWarnings("unchecked")
5561 >                    MapReduceKeysTask<K,V,U>
5562                          t = (MapReduceKeysTask<K,V,U>)c,
5563                          s = t.rights;
5564                      while (s != null) {
# Line 5329 | Line 5611 | public class ConcurrentHashMap<K,V> impl
5611                  result = r;
5612                  CountedCompleter<?> c;
5613                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5614 <                    @SuppressWarnings("unchecked") MapReduceValuesTask<K,V,U>
5614 >                    @SuppressWarnings("unchecked")
5615 >                    MapReduceValuesTask<K,V,U>
5616                          t = (MapReduceValuesTask<K,V,U>)c,
5617                          s = t.rights;
5618                      while (s != null) {
# Line 5382 | Line 5665 | public class ConcurrentHashMap<K,V> impl
5665                  result = r;
5666                  CountedCompleter<?> c;
5667                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5668 <                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
5668 >                    @SuppressWarnings("unchecked")
5669 >                    MapReduceEntriesTask<K,V,U>
5670                          t = (MapReduceEntriesTask<K,V,U>)c,
5671                          s = t.rights;
5672                      while (s != null) {
# Line 5435 | Line 5719 | public class ConcurrentHashMap<K,V> impl
5719                  result = r;
5720                  CountedCompleter<?> c;
5721                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5722 <                    @SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
5722 >                    @SuppressWarnings("unchecked")
5723 >                    MapReduceMappingsTask<K,V,U>
5724                          t = (MapReduceMappingsTask<K,V,U>)c,
5725                          s = t.rights;
5726                      while (s != null) {
# Line 5487 | Line 5772 | public class ConcurrentHashMap<K,V> impl
5772                  result = r;
5773                  CountedCompleter<?> c;
5774                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5775 <                    @SuppressWarnings("unchecked") MapReduceKeysToDoubleTask<K,V>
5775 >                    @SuppressWarnings("unchecked")
5776 >                    MapReduceKeysToDoubleTask<K,V>
5777                          t = (MapReduceKeysToDoubleTask<K,V>)c,
5778                          s = t.rights;
5779                      while (s != null) {
# Line 5536 | Line 5822 | public class ConcurrentHashMap<K,V> impl
5822                  result = r;
5823                  CountedCompleter<?> c;
5824                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5825 <                    @SuppressWarnings("unchecked") MapReduceValuesToDoubleTask<K,V>
5825 >                    @SuppressWarnings("unchecked")
5826 >                    MapReduceValuesToDoubleTask<K,V>
5827                          t = (MapReduceValuesToDoubleTask<K,V>)c,
5828                          s = t.rights;
5829                      while (s != null) {
# Line 5585 | Line 5872 | public class ConcurrentHashMap<K,V> impl
5872                  result = r;
5873                  CountedCompleter<?> c;
5874                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5875 <                    @SuppressWarnings("unchecked") MapReduceEntriesToDoubleTask<K,V>
5875 >                    @SuppressWarnings("unchecked")
5876 >                    MapReduceEntriesToDoubleTask<K,V>
5877                          t = (MapReduceEntriesToDoubleTask<K,V>)c,
5878                          s = t.rights;
5879                      while (s != null) {
# Line 5634 | Line 5922 | public class ConcurrentHashMap<K,V> impl
5922                  result = r;
5923                  CountedCompleter<?> c;
5924                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5925 <                    @SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
5925 >                    @SuppressWarnings("unchecked")
5926 >                    MapReduceMappingsToDoubleTask<K,V>
5927                          t = (MapReduceMappingsToDoubleTask<K,V>)c,
5928                          s = t.rights;
5929                      while (s != null) {
# Line 5683 | Line 5972 | public class ConcurrentHashMap<K,V> impl
5972                  result = r;
5973                  CountedCompleter<?> c;
5974                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5975 <                    @SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
5975 >                    @SuppressWarnings("unchecked")
5976 >                    MapReduceKeysToLongTask<K,V>
5977                          t = (MapReduceKeysToLongTask<K,V>)c,
5978                          s = t.rights;
5979                      while (s != null) {
# Line 5732 | Line 6022 | public class ConcurrentHashMap<K,V> impl
6022                  result = r;
6023                  CountedCompleter<?> c;
6024                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6025 <                    @SuppressWarnings("unchecked") MapReduceValuesToLongTask<K,V>
6025 >                    @SuppressWarnings("unchecked")
6026 >                    MapReduceValuesToLongTask<K,V>
6027                          t = (MapReduceValuesToLongTask<K,V>)c,
6028                          s = t.rights;
6029                      while (s != null) {
# Line 5781 | Line 6072 | public class ConcurrentHashMap<K,V> impl
6072                  result = r;
6073                  CountedCompleter<?> c;
6074                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6075 <                    @SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
6075 >                    @SuppressWarnings("unchecked")
6076 >                    MapReduceEntriesToLongTask<K,V>
6077                          t = (MapReduceEntriesToLongTask<K,V>)c,
6078                          s = t.rights;
6079                      while (s != null) {
# Line 5830 | Line 6122 | public class ConcurrentHashMap<K,V> impl
6122                  result = r;
6123                  CountedCompleter<?> c;
6124                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6125 <                    @SuppressWarnings("unchecked") MapReduceMappingsToLongTask<K,V>
6125 >                    @SuppressWarnings("unchecked")
6126 >                    MapReduceMappingsToLongTask<K,V>
6127                          t = (MapReduceMappingsToLongTask<K,V>)c,
6128                          s = t.rights;
6129                      while (s != null) {
# Line 5879 | Line 6172 | public class ConcurrentHashMap<K,V> impl
6172                  result = r;
6173                  CountedCompleter<?> c;
6174                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6175 <                    @SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
6175 >                    @SuppressWarnings("unchecked")
6176 >                    MapReduceKeysToIntTask<K,V>
6177                          t = (MapReduceKeysToIntTask<K,V>)c,
6178                          s = t.rights;
6179                      while (s != null) {
# Line 5928 | Line 6222 | public class ConcurrentHashMap<K,V> impl
6222                  result = r;
6223                  CountedCompleter<?> c;
6224                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6225 <                    @SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
6225 >                    @SuppressWarnings("unchecked")
6226 >                    MapReduceValuesToIntTask<K,V>
6227                          t = (MapReduceValuesToIntTask<K,V>)c,
6228                          s = t.rights;
6229                      while (s != null) {
# Line 5977 | Line 6272 | public class ConcurrentHashMap<K,V> impl
6272                  result = r;
6273                  CountedCompleter<?> c;
6274                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6275 <                    @SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
6275 >                    @SuppressWarnings("unchecked")
6276 >                    MapReduceEntriesToIntTask<K,V>
6277                          t = (MapReduceEntriesToIntTask<K,V>)c,
6278                          s = t.rights;
6279                      while (s != null) {
# Line 6026 | Line 6322 | public class ConcurrentHashMap<K,V> impl
6322                  result = r;
6323                  CountedCompleter<?> c;
6324                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6325 <                    @SuppressWarnings("unchecked") MapReduceMappingsToIntTask<K,V>
6325 >                    @SuppressWarnings("unchecked")
6326 >                    MapReduceMappingsToIntTask<K,V>
6327                          t = (MapReduceMappingsToIntTask<K,V>)c,
6328                          s = t.rights;
6329                      while (s != null) {
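
A note on the idiom these hunks share: the only textual change in each is that the @SuppressWarnings("unchecked") annotation moves onto its own line ahead of the multi-variable declaration. The surrounding logic is the standard CountedCompleter merge, in which a task reduces its own slice, stores it in result, then walks firstComplete()/nextComplete() and, for each fully completed task, folds the results of the right-hand subtasks it forked (its rights chain) into that task's result. The sketch below is a minimal stand-alone analogue of that pattern, assuming nothing from ConcurrentHashMap itself; the names (ArraySumTask, THRESHOLD) are invented for illustration and do not appear in the class.

import java.util.concurrent.CountedCompleter;
import java.util.concurrent.ForkJoinPool;

// Illustrative analogue only: fork right halves into a "rights" chain,
// reduce the local slice, then fold in results from the forked halves
// inside the firstComplete()/nextComplete() loop, as in the hunks above.
class ArraySumTask extends CountedCompleter<Long> {
    static final int THRESHOLD = 1 << 10;      // split granularity (arbitrary)
    final long[] a;
    final int lo, hi;
    long result;
    ArraySumTask rights, nextRight;            // chain of forked right halves

    ArraySumTask(CountedCompleter<?> parent, long[] a, int lo, int hi,
                 ArraySumTask nextRight) {
        super(parent);
        this.a = a; this.lo = lo; this.hi = hi; this.nextRight = nextRight;
    }

    public final Long getRawResult() { return result; }

    public final void compute() {
        int l = lo, h = hi;
        while (h - l > THRESHOLD) {            // push right halves onto the chain
            int mid = (l + h) >>> 1;
            addToPendingCount(1);
            (rights = new ArraySumTask(this, a, mid, h, rights)).fork();
            h = mid;
        }
        long r = 0L;
        for (int i = l; i < h; ++i)            // reduce the local slice
            r += a[i];
        result = r;
        // Fold in results from the forked halves once the pending count hits zero.
        for (CountedCompleter<?> c = firstComplete(); c != null;
             c = c.nextComplete()) {
            ArraySumTask t = (ArraySumTask)c, s = t.rights;
            while (s != null) {
                t.result += s.result;
                s = t.rights = s.nextRight;
            }
        }
    }

    public static void main(String[] args) {
        long[] data = new long[1_000_000];
        java.util.Arrays.fill(data, 1L);
        System.out.println(ForkJoinPool.commonPool().invoke(
            new ArraySumTask(null, data, 0, data.length, null)));  // prints 1000000
    }
}

Chaining the forked subtasks through rights and merging them in the completion loop avoids blocking joins: each task folds in whatever results are available at the moment its pending count reaches zero, which is why the same loop appears verbatim in every MapReduce*Task above.
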
# Line 6039 | Line 6336 | public class ConcurrentHashMap<K,V> impl
6336      }
6337  
6338      // Unsafe mechanics
6339 <    private static final sun.misc.Unsafe U;
6339 >    private static final Unsafe U = Unsafe.getUnsafe();
6340      private static final long SIZECTL;
6341      private static final long TRANSFERINDEX;
6045    private static final long TRANSFERORIGIN;
6342      private static final long BASECOUNT;
6343      private static final long CELLSBUSY;
6344      private static final long CELLVALUE;
6345 <    private static final long ABASE;
6345 >    private static final int ABASE;
6346      private static final int ASHIFT;
6347  
6348      static {
6349          try {
6054            U = sun.misc.Unsafe.getUnsafe();
6055            Class<?> k = ConcurrentHashMap.class;
6350              SIZECTL = U.objectFieldOffset
6351 <                (k.getDeclaredField("sizeCtl"));
6351 >                (ConcurrentHashMap.class.getDeclaredField("sizeCtl"));
6352              TRANSFERINDEX = U.objectFieldOffset
6353 <                (k.getDeclaredField("transferIndex"));
6060 <            TRANSFERORIGIN = U.objectFieldOffset
6061 <                (k.getDeclaredField("transferOrigin"));
6353 >                (ConcurrentHashMap.class.getDeclaredField("transferIndex"));
6354              BASECOUNT = U.objectFieldOffset
6355 <                (k.getDeclaredField("baseCount"));
6355 >                (ConcurrentHashMap.class.getDeclaredField("baseCount"));
6356              CELLSBUSY = U.objectFieldOffset
6357 <                (k.getDeclaredField("cellsBusy"));
6358 <            Class<?> ck = CounterCell.class;
6357 >                (ConcurrentHashMap.class.getDeclaredField("cellsBusy"));
6358 >
6359              CELLVALUE = U.objectFieldOffset
6360 <                (ck.getDeclaredField("value"));
6361 <            Class<?> ak = Node[].class;
6362 <            ABASE = U.arrayBaseOffset(ak);
6363 <            int scale = U.arrayIndexScale(ak);
6360 >                (CounterCell.class.getDeclaredField("value"));
6361 >
6362 >            ABASE = U.arrayBaseOffset(Node[].class);
6363 >            int scale = U.arrayIndexScale(Node[].class);
6364              if ((scale & (scale - 1)) != 0)
6365 <                throw new Error("data type scale not a power of two");
6365 >                throw new Error("array index scale not a power of two");
6366              ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6367 <        } catch (Exception e) {
6368 <            throw new Error(e);
6367 >        } catch (ReflectiveOperationException e) {
6368 >            throw new ExceptionInInitializerError(e);
6369          }
6370 +
6371 +        // Reduce the risk of rare disastrous classloading in first call to
6372 +        // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773
6373 +        Class<?> ensureLoaded = LockSupport.class;
6374      }
6375   }
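
A note on the mechanics above: the rewritten initializer caches raw field offsets (SIZECTL, TRANSFERINDEX, BASECOUNT, CELLSBUSY, CELLVALUE) and the array base/shift pair (ABASE, ASHIFT) through jdk.internal.misc.Unsafe, replacing the old sun.misc.Unsafe lookup, dropping the TRANSFERORIGIN offset, narrowing ABASE to int, and failing with ExceptionInInitializerError rather than a bare Error on reflection problems. That Unsafe is JDK-internal and not available to ordinary code; the sketch below shows the same offset-then-CAS idea with the supported java.lang.invoke.VarHandle API, purely as an illustration. The Counter class, its field, and its method names are invented for this example and are not how ConcurrentHashMap is implemented.

import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;

// Supported-API analogue of the cached offsets above (illustration only).
class Counter {
    private volatile long baseCount;

    private static final VarHandle BASECOUNT;  // plays the role of the BASECOUNT offset
    private static final VarHandle SLOTS =     // plays the role of ABASE/ASHIFT addressing
        MethodHandles.arrayElementVarHandle(Object[].class);

    static {
        try {
            BASECOUNT = MethodHandles.lookup()
                .findVarHandle(Counter.class, "baseCount", long.class);
        } catch (ReflectiveOperationException e) {
            throw new ExceptionInInitializerError(e);  // same failure policy as the patch
        }
    }

    boolean casBaseCount(long expected, long update) {
        return BASECOUNT.compareAndSet(this, expected, update);
    }

    static Object readSlot(Object[] tab, int i) {      // volatile read of a table slot
        return SLOTS.getVolatile(tab, i);
    }

    static boolean casSlot(Object[] tab, int i, Object expected, Object v) {
        return SLOTS.compareAndSet(tab, i, expected, v);
    }
}

The precomputed ABASE/ASHIFT pair serves the same purpose as the array VarHandle here: an element index i is turned into the raw byte offset ABASE + ((long)i << ASHIFT), which is why the initializer insists that the array index scale be a power of two.
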

Diff Legend

Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)