root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java

Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.228 by jsr166, Tue Jun 18 18:39:14 2013 UTC vs.
Revision 1.263 by jsr166, Sun Jan 4 01:17:26 2015 UTC

# Line 10 | Line 10 | import java.io.ObjectStreamField;
10   import java.io.Serializable;
11   import java.lang.reflect.ParameterizedType;
12   import java.lang.reflect.Type;
13 + import java.util.AbstractMap;
14   import java.util.Arrays;
15   import java.util.Collection;
15 import java.util.Comparator;
16 import java.util.ConcurrentModificationException;
16   import java.util.Enumeration;
17   import java.util.HashMap;
18   import java.util.Hashtable;
# Line 22 | Line 21 | import java.util.Map;
21   import java.util.NoSuchElementException;
22   import java.util.Set;
23   import java.util.Spliterator;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.ForkJoinPool;
24   import java.util.concurrent.atomic.AtomicReference;
25   import java.util.concurrent.locks.LockSupport;
26   import java.util.concurrent.locks.ReentrantLock;
27   import java.util.function.BiConsumer;
28   import java.util.function.BiFunction;
32 import java.util.function.BinaryOperator;
29   import java.util.function.Consumer;
30   import java.util.function.DoubleBinaryOperator;
31   import java.util.function.Function;
# Line 64 | Line 60 | import java.util.stream.Stream;
60   * that key reporting the updated value.)  For aggregate operations
61   * such as {@code putAll} and {@code clear}, concurrent retrievals may
62   * reflect insertion or removal of only some entries.  Similarly,
63 < * Iterators and Enumerations return elements reflecting the state of
64 < * the hash table at some point at or since the creation of the
63 > * Iterators, Spliterators and Enumerations return elements reflecting the
64 > * state of the hash table at some point at or since the creation of the
65   * iterator/enumeration.  They do <em>not</em> throw {@link
66 < * ConcurrentModificationException}.  However, iterators are designed
67 < * to be used by only one thread at a time.  Bear in mind that the
68 < * results of aggregate status methods including {@code size}, {@code
69 < * isEmpty}, and {@code containsValue} are typically useful only when
70 < * a map is not undergoing concurrent updates in other threads.
66 > * java.util.ConcurrentModificationException ConcurrentModificationException}.
67 > * However, iterators are designed to be used by only one thread at a time.
68 > * Bear in mind that the results of aggregate status methods including
69 > * {@code size}, {@code isEmpty}, and {@code containsValue} are typically
70 > * useful only when a map is not undergoing concurrent updates in other threads.
71   * Otherwise the results of these methods reflect transient states
72   * that may be adequate for monitoring or estimation purposes, but not
73   * for program control.
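For illustration of the note above, a minimal sketch showing that size() and the related mappingCount() yield estimates rather than exact snapshots while other threads are updating the map (the map contents here are placeholders):

    import java.util.concurrent.ConcurrentHashMap;

    class SizeEstimateDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            // While other threads insert or remove entries, both results are
            // transient estimates: fine for monitoring, not for program control.
            int approxSize = map.size();
            long approxCount = map.mappingCount(); // preferred: does not truncate to int
            System.out.println(approxSize + " " + approxCount);
        }
    }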
# Line 104 | Line 100 | import java.util.stream.Stream;
100   * mapped values are (perhaps transiently) not used or all take the
101   * same mapping value.
102   *
103 < * <p>A ConcurrentHashMap can be used as scalable frequency map (a
103 > * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
104   * form of histogram or multiset) by using {@link
105   * java.util.concurrent.atomic.LongAdder} values and initializing via
106   * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
107   * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
108 < * {@code freqs.computeIfAbsent(k -> new LongAdder()).increment();}
108 > * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
109   *
110   * <p>This class and its views and iterators implement all of the
111   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
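For illustration, a self-contained sketch of the frequency-map idiom described above, using LongAdder values created on demand via computeIfAbsent (key names are placeholders):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.LongAdder;

    class FreqMapDemo {
        static final ConcurrentHashMap<String,LongAdder> freqs = new ConcurrentHashMap<>();

        static void record(String key) {
            // Creates the counter on first use, then increments without locking the map.
            freqs.computeIfAbsent(key, k -> new LongAdder()).increment();
        }

        public static void main(String[] args) {
            record("get"); record("get"); record("put");
            freqs.forEach((k, adder) -> System.out.println(k + " -> " + adder.sum()));
        }
    }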
# Line 131 | Line 127 | import java.util.stream.Stream;
127   * of supplied functions should not depend on any ordering, or on any
128   * other objects or values that may transiently change while
129   * computation is in progress; and except for forEach actions, should
130 < * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
130 > * ideally be side-effect-free. Bulk operations on {@link java.util.Map.Entry}
131   * objects do not support method {@code setValue}.
132   *
133   * <ul>
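As a hedged sketch of the bulk-operation contract above: a parallel forEach whose action is side-effect-free with respect to the map and does not rely on ordering (a threshold of 1 simply requests maximal parallelism):

    import java.util.concurrent.ConcurrentHashMap;

    class BulkForEachDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            map.put("x", 1);
            map.put("y", 2);
            // Long.MAX_VALUE would force sequential execution; 1 allows full parallelism.
            map.forEach(1L, (k, v) -> System.out.println(k + "=" + v));
        }
    }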
# Line 235 | Line 231 | import java.util.stream.Stream;
231   * @param <K> the type of keys maintained by this map
232   * @param <V> the type of mapped values
233   */
234 < public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
234 > public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
235 >    implements ConcurrentMap<K,V>, Serializable {
236      private static final long serialVersionUID = 7249069246763182397L;
237  
238      /*
# Line 262 | Line 259 | public class ConcurrentHashMap<K,V> impl
259       * because they have negative hash fields and null key and value
260       * fields. (These special nodes are either uncommon or transient,
261       * so the impact of carrying around some unused fields is
262 <     * insignficant.)
262 >     * insignificant.)
263       *
264       * The table is lazily initialized to a power-of-two size upon the
265       * first insertion.  Each bin in the table normally contains a
# Line 343 | Line 340 | public class ConcurrentHashMap<K,V> impl
340       * The table is resized when occupancy exceeds a percentage
341       * threshold (nominally, 0.75, but see below).  Any thread
342       * noticing an overfull bin may assist in resizing after the
343 <     * initiating thread allocates and sets up the replacement
344 <     * array. However, rather than stalling, these other threads may
345 <     * proceed with insertions etc.  The use of TreeBins shields us
346 <     * from the worst case effects of overfilling while resizes are in
343 >     * initiating thread allocates and sets up the replacement array.
344 >     * However, rather than stalling, these other threads may proceed
345 >     * with insertions etc.  The use of TreeBins shields us from the
346 >     * worst case effects of overfilling while resizes are in
347       * progress.  Resizing proceeds by transferring bins, one by one,
348 <     * from the table to the next table. To enable concurrency, the
349 <     * next table must be (incrementally) prefilled with place-holders
350 <     * serving as reverse forwarders to the old table.  Because we are
348 >     * from the table to the next table. However, threads claim small
349 >     * blocks of indices to transfer (via field transferIndex) before
350 >     * doing so, reducing contention.  A generation stamp in field
351 >     * sizeCtl ensures that resizings do not overlap. Because we are
352       * using power-of-two expansion, the elements from each bin must
353       * either stay at same index, or move with a power of two
354       * offset. We eliminate unnecessary node creation by catching
# Line 371 | Line 369 | public class ConcurrentHashMap<K,V> impl
369       * locks, average aggregate waits become shorter as resizing
370       * progresses.  The transfer operation must also ensure that all
371       * accessible bins in both the old and new table are usable by any
372 <     * traversal.  This is arranged by proceeding from the last bin
373 <     * (table.length - 1) up towards the first.  Upon seeing a
374 <     * forwarding node, traversals (see class Traverser) arrange to
375 <     * move to the new table without revisiting nodes.  However, to
376 <     * ensure that no intervening nodes are skipped, bin splitting can
377 <     * only begin after the associated reverse-forwarders are in
378 <     * place.
372 >     * traversal.  This is arranged in part by proceeding from the
373 >     * last bin (table.length - 1) up towards the first.  Upon seeing
374 >     * a forwarding node, traversals (see class Traverser) arrange to
375 >     * move to the new table without revisiting nodes.  To ensure that
376 >     * no intervening nodes are skipped even when moved out of order,
377 >     * a stack (see class TableStack) is created on first encounter of
378 >     * a forwarding node during a traversal, to maintain its place if
379 >     * later processing the current table. The need for these
380 >     * save/restore mechanics is relatively rare, but when one
381 >     * forwarding node is encountered, typically many more will be.
382 >     * So Traversers use a simple caching scheme to avoid creating so
383 >     * many new TableStack nodes. (Thanks to Peter Levart for
384 >     * suggesting use of a stack here.)
385       *
386       * The traversal scheme also applies to partial traversals of
387       * ranges of bins (via an alternate Traverser constructor)
# Line 409 | Line 413 | public class ConcurrentHashMap<K,V> impl
413       * related operations (which is the main reason we cannot use
414       * existing collections such as TreeMaps). TreeBins contain
415       * Comparable elements, but may contain others, as well as
416 <     * elements that are Comparable but not necessarily Comparable
417 <     * for the same T, so we cannot invoke compareTo among them. To
418 <     * handle this, the tree is ordered primarily by hash value, then
419 <     * by Comparable.compareTo order if applicable.  On lookup at a
420 <     * node, if elements are not comparable or compare as 0 then both
421 <     * left and right children may need to be searched in the case of
422 <     * tied hash values. (This corresponds to the full list search
423 <     * that would be necessary if all elements were non-Comparable and
424 <     * had tied hashes.)  The red-black balancing code is updated from
425 <     * pre-jdk-collections
416 >     * elements that are Comparable but not necessarily Comparable for
417 >     * the same T, so we cannot invoke compareTo among them. To handle
418 >     * this, the tree is ordered primarily by hash value, then by
419 >     * Comparable.compareTo order if applicable.  On lookup at a node,
420 >     * if elements are not comparable or compare as 0 then both left
421 >     * and right children may need to be searched in the case of tied
422 >     * hash values. (This corresponds to the full list search that
423 >     * would be necessary if all elements were non-Comparable and had
424 >     * tied hashes.) On insertion, to keep a total ordering (or as
425 >     * close as is required here) across rebalancings, we compare
426 >     * classes and identityHashCodes as tie-breakers. The red-black
427 >     * balancing code is updated from pre-jdk-collections
428       * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
429       * based in turn on Cormen, Leiserson, and Rivest "Introduction to
430       * Algorithms" (CLR).
431       *
432       * TreeBins also require an additional locking mechanism.  While
433       * list traversal is always possible by readers even during
434 <     * updates, tree traversal is not, mainly beause of tree-rotations
434 >     * updates, tree traversal is not, mainly because of tree-rotations
435       * that may change the root node and/or its linkages.  TreeBins
436       * include a simple read-write lock mechanism parasitic on the
437       * main bin-synchronization strategy: Structural adjustments
# Line 448 | Line 454 | public class ConcurrentHashMap<K,V> impl
454       * unused "Segment" class that is instantiated in minimal form
455       * only when serializing.
456       *
457 +     * Also, solely for compatibility with previous versions of this
458 +     * class, it extends AbstractMap, even though all of its methods
459 +     * are overridden, so it is just useless baggage.
460 +     *
461       * This file is organized to make things a little easier to follow
462       * while reading than they might otherwise: First the main static
463       * declarations and utilities, then fields, then main public
# Line 528 | Line 538 | public class ConcurrentHashMap<K,V> impl
538       */
539      private static final int MIN_TRANSFER_STRIDE = 16;
540  
541 +    /**
542 +     * The number of bits used for generation stamp in sizeCtl.
543 +     * Must be at least 6 for 32bit arrays.
544 +     */
545 +    private static int RESIZE_STAMP_BITS = 16;
546 +
547 +    /**
548 +     * The maximum number of threads that can help resize.
549 +     * Must fit in 32 - RESIZE_STAMP_BITS bits.
550 +     */
551 +    private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
552 +
553 +    /**
554 +     * The bit shift for recording size stamp in sizeCtl.
555 +     */
556 +    private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
557 +
558      /*
559       * Encodings for Node hash fields. See above for explanation.
560       */
561 <    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
562 <    static final int TREEBIN   = 0x80000000; // hash for heads of treea
563 <    static final int RESERVED  = 0x80000001; // hash for transient reservations
561 >    static final int MOVED     = -1; // hash for forwarding nodes
562 >    static final int TREEBIN   = -2; // hash for roots of trees
563 >    static final int RESERVED  = -3; // hash for transient reservations
564      static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
565  
566      /** Number of CPUS, to place bounds on some sizings */
# Line 552 | Line 579 | public class ConcurrentHashMap<K,V> impl
579       * Key-value entry.  This class is never exported out as a
580       * user-mutable Map.Entry (i.e., one supporting setValue; see
581       * MapEntry below), but can be used for read-only traversals used
582 <     * in bulk tasks.  Subclasses of Node with a negativehash field
582 >     * in bulk tasks.  Subclasses of Node with a negative hash field
583       * are special, and contain null keys and values (but are never
584       * exported).  Otherwise, keys and vals are never null.
585       */
# Line 560 | Line 587 | public class ConcurrentHashMap<K,V> impl
587          final int hash;
588          final K key;
589          volatile V val;
590 <        Node<K,V> next;
590 >        volatile Node<K,V> next;
591  
592          Node(int hash, K key, V val, Node<K,V> next) {
593              this.hash = hash;
# Line 685 | Line 712 | public class ConcurrentHashMap<K,V> impl
712       * errors by users, these checks must operate on local variables,
713       * which accounts for some odd-looking inline assignments below.
714       * Note that calls to setTabAt always occur within locked regions,
715 <     * and so do not need full volatile semantics, but still require
716 <     * ordering to maintain concurrent readability.
715 >     * and so in principle require only release ordering, not
716 >     * full volatile semantics, but are currently coded as volatile
717 >     * writes to be conservative.
718       */
719  
720      @SuppressWarnings("unchecked")
# Line 700 | Line 728 | public class ConcurrentHashMap<K,V> impl
728      }
729  
730      static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
731 <        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
731 >        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
732      }
733  
734      /* ---------------- Fields -------------- */
# Line 739 | Line 767 | public class ConcurrentHashMap<K,V> impl
767      private transient volatile int transferIndex;
768  
769      /**
742     * The least available table index to split while resizing.
743     */
744    private transient volatile int transferOrigin;
745
746    /**
770       * Spinlock (locked via CAS) used when resizing and/or creating CounterCells.
771       */
772      private transient volatile int cellsBusy;
# Line 1000 | Line 1023 | public class ConcurrentHashMap<K,V> impl
1023                                      p.val = value;
1024                              }
1025                          }
1026 +                        else if (f instanceof ReservationNode)
1027 +                            throw new IllegalStateException("Recursive update");
1028                      }
1029                  }
1030                  if (binCount != 0) {
# Line 1102 | Line 1127 | public class ConcurrentHashMap<K,V> impl
1127                                  }
1128                              }
1129                          }
1130 +                        else if (f instanceof ReservationNode)
1131 +                            throw new IllegalStateException("Recursive update");
1132                      }
1133                  }
1134                  if (validated) {
# Line 1162 | Line 1189 | public class ConcurrentHashMap<K,V> impl
1189       * operations.  It does not support the {@code add} or
1190       * {@code addAll} operations.
1191       *
1192 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1193 <     * that will never throw {@link ConcurrentModificationException},
1194 <     * and guarantees to traverse elements as they existed upon
1195 <     * construction of the iterator, and may (but is not guaranteed to)
1196 <     * reflect any modifications subsequent to construction.
1192 >     * <p>The view's iterators and spliterators are
1193 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1194 >     *
1195 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1196 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1197       *
1198       * @return the set view
1199       */
# Line 1185 | Line 1212 | public class ConcurrentHashMap<K,V> impl
1212       * {@code retainAll}, and {@code clear} operations.  It does not
1213       * support the {@code add} or {@code addAll} operations.
1214       *
1215 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1216 <     * that will never throw {@link ConcurrentModificationException},
1217 <     * and guarantees to traverse elements as they existed upon
1218 <     * construction of the iterator, and may (but is not guaranteed to)
1219 <     * reflect any modifications subsequent to construction.
1215 >     * <p>The view's iterators and spliterators are
1216 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1217 >     *
1218 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}
1219 >     * and {@link Spliterator#NONNULL}.
1220       *
1221       * @return the collection view
1222       */
# Line 1207 | Line 1234 | public class ConcurrentHashMap<K,V> impl
1234       * {@code removeAll}, {@code retainAll}, and {@code clear}
1235       * operations.
1236       *
1237 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1238 <     * that will never throw {@link ConcurrentModificationException},
1239 <     * and guarantees to traverse elements as they existed upon
1240 <     * construction of the iterator, and may (but is not guaranteed to)
1241 <     * reflect any modifications subsequent to construction.
1237 >     * <p>The view's iterators and spliterators are
1238 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1239 >     *
1240 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1241 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1242       *
1243       * @return the set view
1244       */
# Line 1321 | Line 1348 | public class ConcurrentHashMap<K,V> impl
1348       * Saves the state of the {@code ConcurrentHashMap} instance to a
1349       * stream (i.e., serializes it).
1350       * @param s the stream
1351 +     * @throws java.io.IOException if an I/O error occurs
1352       * @serialData
1353       * the key (Object) and value (Object)
1354       * for each key-value mapping, followed by a null pair.
# Line 1338 | Line 1366 | public class ConcurrentHashMap<K,V> impl
1366          }
1367          int segmentShift = 32 - sshift;
1368          int segmentMask = ssize - 1;
1369 <        @SuppressWarnings("unchecked") Segment<K,V>[] segments = (Segment<K,V>[])
1369 >        @SuppressWarnings("unchecked")
1370 >        Segment<K,V>[] segments = (Segment<K,V>[])
1371              new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
1372          for (int i = 0; i < segments.length; ++i)
1373              segments[i] = new Segment<K,V>(LOAD_FACTOR);
# Line 1363 | Line 1392 | public class ConcurrentHashMap<K,V> impl
1392      /**
1393       * Reconstitutes the instance from a stream (that is, deserializes it).
1394       * @param s the stream
1395 +     * @throws ClassNotFoundException if the class of a serialized object
1396 +     *         could not be found
1397 +     * @throws java.io.IOException if an I/O error occurs
1398       */
1399      private void readObject(java.io.ObjectInputStream s)
1400          throws java.io.IOException, ClassNotFoundException {
# Line 1378 | Line 1410 | public class ConcurrentHashMap<K,V> impl
1410          long size = 0L;
1411          Node<K,V> p = null;
1412          for (;;) {
1413 <            @SuppressWarnings("unchecked") K k = (K) s.readObject();
1414 <            @SuppressWarnings("unchecked") V v = (V) s.readObject();
1413 >            @SuppressWarnings("unchecked")
1414 >            K k = (K) s.readObject();
1415 >            @SuppressWarnings("unchecked")
1416 >            V v = (V) s.readObject();
1417              if (k != null && v != null) {
1418                  p = new Node<K,V>(spread(k.hashCode()), k, v, p);
1419                  ++size;
# Line 1397 | Line 1431 | public class ConcurrentHashMap<K,V> impl
1431                  int sz = (int)size;
1432                  n = tableSizeFor(sz + (sz >>> 1) + 1);
1433              }
1434 <            @SuppressWarnings({"rawtypes","unchecked"})
1435 <                Node<K,V>[] tab = (Node<K,V>[])new Node[n];
1434 >            @SuppressWarnings("unchecked")
1435 >            Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
1436              int mask = n - 1;
1437              long added = 0L;
1438              while (p != null) {
# Line 1623 | Line 1657 | public class ConcurrentHashMap<K,V> impl
1657                                  Node<K,V> pred = e;
1658                                  if ((e = e.next) == null) {
1659                                      if ((val = mappingFunction.apply(key)) != null) {
1660 +                                        if (pred.next != null)
1661 +                                            throw new IllegalStateException("Recursive update");
1662                                          added = true;
1663                                          pred.next = new Node<K,V>(h, key, val, null);
1664                                      }
# Line 1642 | Line 1678 | public class ConcurrentHashMap<K,V> impl
1678                                  t.putTreeVal(h, key, val);
1679                              }
1680                          }
1681 +                        else if (f instanceof ReservationNode)
1682 +                            throw new IllegalStateException("Recursive update");
1683                      }
1684                  }
1685                  if (binCount != 0) {
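For illustration of the new "Recursive update" checks, a sketch of the misuse they detect: a mapping function that re-enters the same map for the same key meets its own ReservationNode and now fails fast instead of corrupting the bin (behavior for distinct keys depends on whether they share a bin):

    import java.util.concurrent.ConcurrentHashMap;

    class RecursiveUpdateDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
            try {
                // The mapping function must not modify the map it is computing for.
                map.computeIfAbsent("a", k -> map.computeIfAbsent("a", k2 -> 1));
            } catch (IllegalStateException e) {
                System.out.println("detected: " + e.getMessage()); // "Recursive update"
            }
        }
    }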
# Line 1737 | Line 1775 | public class ConcurrentHashMap<K,V> impl
1775                                  }
1776                              }
1777                          }
1778 +                        else if (f instanceof ReservationNode)
1779 +                            throw new IllegalStateException("Recursive update");
1780                      }
1781                  }
1782                  if (binCount != 0)
# Line 1828 | Line 1868 | public class ConcurrentHashMap<K,V> impl
1868                                  if ((e = e.next) == null) {
1869                                      val = remappingFunction.apply(key, null);
1870                                      if (val != null) {
1871 +                                        if (pred.next != null)
1872 +                                            throw new IllegalStateException("Recursive update");
1873                                          delta = 1;
1874                                          pred.next =
1875                                              new Node<K,V>(h, key, val, null);
# Line 1860 | Line 1902 | public class ConcurrentHashMap<K,V> impl
1902                                      setTabAt(tab, i, untreeify(t.first));
1903                              }
1904                          }
1905 +                        else if (f instanceof ReservationNode)
1906 +                            throw new IllegalStateException("Recursive update");
1907                      }
1908                  }
1909                  if (binCount != 0) {
# Line 1969 | Line 2013 | public class ConcurrentHashMap<K,V> impl
2013                                      setTabAt(tab, i, untreeify(t.first));
2014                              }
2015                          }
2016 +                        else if (f instanceof ReservationNode)
2017 +                            throw new IllegalStateException("Recursive update");
2018                      }
2019                  }
2020                  if (binCount != 0) {
# Line 1987 | Line 2033 | public class ConcurrentHashMap<K,V> impl
2033  
2034      /**
2035       * Legacy method testing if some key maps into the specified value
2036 <     * in this table.  This method is identical in functionality to
2036 >     * in this table.
2037 >     *
2038 >     * @deprecated This method is identical in functionality to
2039       * {@link #containsValue(Object)}, and exists solely to ensure
2040       * full compatibility with class {@link java.util.Hashtable},
2041       * which supported this method prior to introduction of the
# Line 2000 | Line 2048 | public class ConcurrentHashMap<K,V> impl
2048       *         {@code false} otherwise
2049       * @throws NullPointerException if the specified value is null
2050       */
2051 <    @Deprecated public boolean contains(Object value) {
2051 >    @Deprecated
2052 >    public boolean contains(Object value) {
2053          return containsValue(value);
2054      }
2055  
# Line 2049 | Line 2098 | public class ConcurrentHashMap<K,V> impl
2098       * Creates a new {@link Set} backed by a ConcurrentHashMap
2099       * from the given type to {@code Boolean.TRUE}.
2100       *
2101 +     * @param <K> the element type of the returned set
2102       * @return the new set
2103       * @since 1.8
2104       */
# Line 2063 | Line 2113 | public class ConcurrentHashMap<K,V> impl
2113       *
2114       * @param initialCapacity The implementation performs internal
2115       * sizing to accommodate this many elements.
2116 +     * @param <K> the element type of the returned set
2117 +     * @return the new set
2118       * @throws IllegalArgumentException if the initial capacity of
2119       * elements is negative
2068     * @return the new set
2120       * @since 1.8
2121       */
2122      public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
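A short usage sketch for the newKeySet factories documented above:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    class NewKeySetDemo {
        public static void main(String[] args) {
            // A concurrent Set view backed by a ConcurrentHashMap<K,Boolean>.
            Set<String> seen = ConcurrentHashMap.newKeySet(64);
            seen.add("first");
            System.out.println(seen.contains("first") + " " + seen.size());
        }
    }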
# Line 2103 | Line 2154 | public class ConcurrentHashMap<K,V> impl
2154          }
2155  
2156          Node<K,V> find(int h, Object k) {
2157 <            Node<K,V> e; int n;
2158 <            Node<K,V>[] tab = nextTable;
2159 <            if (k != null && tab != null && (n = tab.length) > 0 &&
2160 <                (e = tabAt(tab, (n - 1) & h)) != null) {
2161 <                do {
2157 >            // loop to avoid arbitrarily deep recursion on forwarding nodes
2158 >            outer: for (Node<K,V>[] tab = nextTable;;) {
2159 >                Node<K,V> e; int n;
2160 >                if (k == null || tab == null || (n = tab.length) == 0 ||
2161 >                    (e = tabAt(tab, (n - 1) & h)) == null)
2162 >                    return null;
2163 >                for (;;) {
2164                      int eh; K ek;
2165                      if ((eh = e.hash) == h &&
2166                          ((ek = e.key) == k || (ek != null && k.equals(ek))))
2167                          return e;
2168 <                    if (eh < 0)
2169 <                        return e.find(h, k);
2170 <                } while ((e = e.next) != null);
2168 >                    if (eh < 0) {
2169 >                        if (e instanceof ForwardingNode) {
2170 >                            tab = ((ForwardingNode<K,V>)e).nextTable;
2171 >                            continue outer;
2172 >                        }
2173 >                        else
2174 >                            return e.find(h, k);
2175 >                    }
2176 >                    if ((e = e.next) == null)
2177 >                        return null;
2178 >                }
2179              }
2119            return null;
2180          }
2181      }
2182  
# Line 2136 | Line 2196 | public class ConcurrentHashMap<K,V> impl
2196      /* ---------------- Table Initialization and Resizing -------------- */
2197  
2198      /**
2199 +     * Returns the stamp bits for resizing a table of size n.
2200 +     * Must be negative when shifted left by RESIZE_STAMP_SHIFT.
2201 +     */
2202 +    static final int resizeStamp(int n) {
2203 +        return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
2204 +    }
2205 +
2206 +    /**
2207       * Initializes table, using the size recorded in sizeCtl.
2208       */
2209      private final Node<K,V>[] initTable() {
# Line 2147 | Line 2215 | public class ConcurrentHashMap<K,V> impl
2215                  try {
2216                      if ((tab = table) == null || tab.length == 0) {
2217                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2218 <                        @SuppressWarnings({"rawtypes","unchecked"})
2219 <                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2218 >                        @SuppressWarnings("unchecked")
2219 >                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2220                          table = tab = nt;
2221                          sc = n - (n >>> 2);
2222                      }
# Line 2189 | Line 2257 | public class ConcurrentHashMap<K,V> impl
2257              s = sumCount();
2258          }
2259          if (check >= 0) {
2260 <            Node<K,V>[] tab, nt; int sc;
2260 >            Node<K,V>[] tab, nt; int n, sc;
2261              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
2262 <                   tab.length < MAXIMUM_CAPACITY) {
2262 >                   (n = tab.length) < MAXIMUM_CAPACITY) {
2263 >                int rs = resizeStamp(n);
2264                  if (sc < 0) {
2265 <                    if (sc == -1 || transferIndex <= transferOrigin ||
2266 <                        (nt = nextTable) == null)
2265 >                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2266 >                        sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
2267 >                        transferIndex <= 0)
2268                          break;
2269 <                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2269 >                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1))
2270                          transfer(tab, nt);
2271                  }
2272 <                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
2272 >                else if (U.compareAndSwapInt(this, SIZECTL, sc,
2273 >                                             (rs << RESIZE_STAMP_SHIFT) + 2))
2274                      transfer(tab, null);
2275                  s = sumCount();
2276              }
# Line 2211 | Line 2282 | public class ConcurrentHashMap<K,V> impl
2282       */
2283      final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
2284          Node<K,V>[] nextTab; int sc;
2285 <        if ((f instanceof ForwardingNode) &&
2285 >        if (tab != null && (f instanceof ForwardingNode) &&
2286              (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
2287 <            if (nextTab == nextTable && tab == table &&
2288 <                transferIndex > transferOrigin && (sc = sizeCtl) < -1 &&
2289 <                U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2290 <                transfer(tab, nextTab);
2287 >            int rs = resizeStamp(tab.length);
2288 >            while (nextTab == nextTable && table == tab &&
2289 >                   (sc = sizeCtl) < 0) {
2290 >                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2291 >                    sc == rs + MAX_RESIZERS || transferIndex <= 0)
2292 >                    break;
2293 >                if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) {
2294 >                    transfer(tab, nextTab);
2295 >                    break;
2296 >                }
2297 >            }
2298              return nextTab;
2299          }
2300          return table;
# Line 2238 | Line 2316 | public class ConcurrentHashMap<K,V> impl
2316                  if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2317                      try {
2318                          if (table == tab) {
2319 <                            @SuppressWarnings({"rawtypes","unchecked"})
2320 <                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2319 >                            @SuppressWarnings("unchecked")
2320 >                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2321                              table = nt;
2322                              sc = n - (n >>> 2);
2323                          }
# Line 2250 | Line 2328 | public class ConcurrentHashMap<K,V> impl
2328              }
2329              else if (c <= sc || n >= MAXIMUM_CAPACITY)
2330                  break;
2331 <            else if (tab == table &&
2332 <                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
2333 <                transfer(tab, null);
2331 >            else if (tab == table) {
2332 >                int rs = resizeStamp(n);
2333 >                if (U.compareAndSwapInt(this, SIZECTL, sc,
2334 >                                        (rs << RESIZE_STAMP_SHIFT) + 2))
2335 >                    transfer(tab, null);
2336 >            }
2337          }
2338      }
2339  
# Line 2266 | Line 2347 | public class ConcurrentHashMap<K,V> impl
2347              stride = MIN_TRANSFER_STRIDE; // subdivide range
2348          if (nextTab == null) {            // initiating
2349              try {
2350 <                @SuppressWarnings({"rawtypes","unchecked"})
2351 <                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
2350 >                @SuppressWarnings("unchecked")
2351 >                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
2352                  nextTab = nt;
2353              } catch (Throwable ex) {      // try to cope with OOME
2354                  sizeCtl = Integer.MAX_VALUE;
2355                  return;
2356              }
2357              nextTable = nextTab;
2277            transferOrigin = n;
2358              transferIndex = n;
2279            ForwardingNode<K,V> rev = new ForwardingNode<K,V>(tab);
2280            for (int k = n; k > 0;) {    // progressively reveal ready slots
2281                int nextk = (k > stride) ? k - stride : 0;
2282                for (int m = nextk; m < k; ++m)
2283                    nextTab[m] = rev;
2284                for (int m = n + nextk; m < n + k; ++m)
2285                    nextTab[m] = rev;
2286                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2287            }
2359          }
2360          int nextn = nextTab.length;
2361          ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
2362          boolean advance = true;
2363 +        boolean finishing = false; // to ensure sweep before committing nextTab
2364          for (int i = 0, bound = 0;;) {
2365 <            int nextIndex, nextBound, fh; Node<K,V> f;
2365 >            Node<K,V> f; int fh;
2366              while (advance) {
2367 <                if (--i >= bound)
2367 >                int nextIndex, nextBound;
2368 >                if (--i >= bound || finishing)
2369                      advance = false;
2370 <                else if ((nextIndex = transferIndex) <= transferOrigin) {
2370 >                else if ((nextIndex = transferIndex) <= 0) {
2371                      i = -1;
2372                      advance = false;
2373                  }
# Line 2308 | Line 2381 | public class ConcurrentHashMap<K,V> impl
2381                  }
2382              }
2383              if (i < 0 || i >= n || i + n >= nextn) {
2384 <                for (int sc;;) {
2385 <                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2386 <                        if (sc == -1) {
2387 <                            nextTable = null;
2388 <                            table = nextTab;
2389 <                            sizeCtl = (n << 1) - (n >>> 1);
2317 <                        }
2318 <                        return;
2319 <                    }
2384 >                int sc;
2385 >                if (finishing) {
2386 >                    nextTable = null;
2387 >                    table = nextTab;
2388 >                    sizeCtl = (n << 1) - (n >>> 1);
2389 >                    return;
2390                  }
2391 <            }
2392 <            else if ((f = tabAt(tab, i)) == null) {
2393 <                if (casTabAt(tab, i, null, fwd)) {
2394 <                    setTabAt(nextTab, i, null);
2395 <                    setTabAt(nextTab, i + n, null);
2326 <                    advance = true;
2391 >                if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
2392 >                    if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
2393 >                        return;
2394 >                    finishing = advance = true;
2395 >                    i = n; // recheck before commit
2396                  }
2397              }
2398 +            else if ((f = tabAt(tab, i)) == null)
2399 +                advance = casTabAt(tab, i, null, fwd);
2400              else if ((fh = f.hash) == MOVED)
2401                  advance = true; // already processed
2402              else {
# Line 2357 | Line 2428 | public class ConcurrentHashMap<K,V> impl
2428                                  else
2429                                      hn = new Node<K,V>(ph, pk, pv, hn);
2430                              }
2431 +                            setTabAt(nextTab, i, ln);
2432 +                            setTabAt(nextTab, i + n, hn);
2433 +                            setTabAt(tab, i, fwd);
2434 +                            advance = true;
2435                          }
2436                          else if (f instanceof TreeBin) {
2437                              TreeBin<K,V> t = (TreeBin<K,V>)f;
# Line 2388 | Line 2463 | public class ConcurrentHashMap<K,V> impl
2463                                  (hc != 0) ? new TreeBin<K,V>(lo) : t;
2464                              hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
2465                                  (lc != 0) ? new TreeBin<K,V>(hi) : t;
2466 +                            setTabAt(nextTab, i, ln);
2467 +                            setTabAt(nextTab, i + n, hn);
2468 +                            setTabAt(tab, i, fwd);
2469 +                            advance = true;
2470                          }
2392                        else
2393                            ln = hn = null;
2394                        setTabAt(nextTab, i, ln);
2395                        setTabAt(nextTab, i + n, hn);
2396                        setTabAt(tab, i, fwd);
2397                        advance = true;
2471                      }
2472                  }
2473              }
# Line 2515 | Line 2588 | public class ConcurrentHashMap<K,V> impl
2588      private final void treeifyBin(Node<K,V>[] tab, int index) {
2589          Node<K,V> b; int n, sc;
2590          if (tab != null) {
2591 <            if ((n = tab.length) < MIN_TREEIFY_CAPACITY) {
2592 <                if (tab == table && (sc = sizeCtl) >= 0 &&
2593 <                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
2521 <                    transfer(tab, null);
2522 <            }
2523 <            else if ((b = tabAt(tab, index)) != null) {
2591 >            if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
2592 >                tryPresize(n << 1);
2593 >            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
2594                  synchronized (b) {
2595                      if (tabAt(tab, index) == b) {
2596                          TreeNode<K,V> hd = null, tl = null;
# Line 2542 | Line 2612 | public class ConcurrentHashMap<K,V> impl
2612      }
2613  
2614      /**
2615 <     * Returns a list on non-TreeNodes replacing those in given list
2615 >     * Returns a list on non-TreeNodes replacing those in given list.
2616       */
2617      static <K,V> Node<K,V> untreeify(Node<K,V> b) {
2618          Node<K,V> hd = null, tl = null;
# Line 2586 | Line 2656 | public class ConcurrentHashMap<K,V> impl
2656          final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
2657              if (k != null) {
2658                  TreeNode<K,V> p = this;
2659 <                do  {
2659 >                do {
2660                      int ph, dir; K pk; TreeNode<K,V> q;
2661                      TreeNode<K,V> pl = p.left, pr = p.right;
2662                      if ((ph = p.hash) > h)
# Line 2595 | Line 2665 | public class ConcurrentHashMap<K,V> impl
2665                          p = pr;
2666                      else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
2667                          return p;
2668 <                    else if (pl == null && pr == null)
2669 <                        break;
2668 >                    else if (pl == null)
2669 >                        p = pr;
2670 >                    else if (pr == null)
2671 >                        p = pl;
2672                      else if ((kc != null ||
2673                                (kc = comparableClassFor(k)) != null) &&
2674                               (dir = compareComparables(kc, k, pk)) != 0)
2675                          p = (dir < 0) ? pl : pr;
2676 <                    else if (pl == null)
2605 <                        p = pr;
2606 <                    else if (pr == null ||
2607 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2608 <                        p = pl;
2609 <                    else
2676 >                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
2677                          return q;
2678 +                    else
2679 +                        p = pl;
2680                  } while (p != null);
2681              }
2682              return null;
# Line 2634 | Line 2703 | public class ConcurrentHashMap<K,V> impl
2703          static final int READER = 4; // increment value for setting read lock
2704  
2705          /**
2706 +         * Tie-breaking utility for ordering insertions when equal
2707 +         * hashCodes and non-comparable. We don't require a total
2708 +         * order, just a consistent insertion rule to maintain
2709 +         * equivalence across rebalancings. Tie-breaking further than
2710 +         * necessary simplifies testing a bit.
2711 +         */
2712 +        static int tieBreakOrder(Object a, Object b) {
2713 +            int d;
2714 +            if (a == null || b == null ||
2715 +                (d = a.getClass().getName().
2716 +                 compareTo(b.getClass().getName())) == 0)
2717 +                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
2718 +                     -1 : 1);
2719 +            return d;
2720 +        }
2721 +
2722 +        /**
2723           * Creates bin with initial set of nodes headed by b.
2724           */
2725          TreeBin(TreeNode<K,V> b) {
# Line 2649 | Line 2735 | public class ConcurrentHashMap<K,V> impl
2735                      r = x;
2736                  }
2737                  else {
2738 <                    Object key = x.key;
2739 <                    int hash = x.hash;
2738 >                    K k = x.key;
2739 >                    int h = x.hash;
2740                      Class<?> kc = null;
2741                      for (TreeNode<K,V> p = r;;) {
2742                          int dir, ph;
2743 <                        if ((ph = p.hash) > hash)
2743 >                        K pk = p.key;
2744 >                        if ((ph = p.hash) > h)
2745                              dir = -1;
2746 <                        else if (ph < hash)
2746 >                        else if (ph < h)
2747                              dir = 1;
2748 <                        else if ((kc != null ||
2749 <                                  (kc = comparableClassFor(key)) != null))
2750 <                            dir = compareComparables(kc, key, p.key);
2751 <                        else
2665 <                            dir = 0;
2748 >                        else if ((kc == null &&
2749 >                                  (kc = comparableClassFor(k)) == null) ||
2750 >                                 (dir = compareComparables(kc, k, pk)) == 0)
2751 >                            dir = tieBreakOrder(k, pk);
2752                          TreeNode<K,V> xp = p;
2753                          if ((p = (dir <= 0) ? p.left : p.right) == null) {
2754                              x.parent = xp;
# Line 2677 | Line 2763 | public class ConcurrentHashMap<K,V> impl
2763                  }
2764              }
2765              this.root = r;
2766 +            assert checkInvariants(root);
2767          }
2768  
2769          /**
2770 <         * Acquires write lock for tree restructuring
2770 >         * Acquires write lock for tree restructuring.
2771           */
2772          private final void lockRoot() {
2773              if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
# Line 2688 | Line 2775 | public class ConcurrentHashMap<K,V> impl
2775          }
2776  
2777          /**
2778 <         * Releases write lock for tree restructuring
2778 >         * Releases write lock for tree restructuring.
2779           */
2780          private final void unlockRoot() {
2781              lockState = 0;
2782          }
2783  
2784          /**
2785 <         * Possibly blocks awaiting root lock
2785 >         * Possibly blocks awaiting root lock.
2786           */
2787          private final void contendedLock() {
2788              boolean waiting = false;
2789              for (int s;;) {
2790 <                if (((s = lockState) & WRITER) == 0) {
2790 >                if (((s = lockState) & ~WAITER) == 0) {
2791                      if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
2792                          if (waiting)
2793                              waiter = null;
2794                          return;
2795                      }
2796                  }
2797 <                else if ((s | WAITER) == 0) {
2797 >                else if ((s & WAITER) == 0) {
2798                      if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
2799                          waiting = true;
2800                          waiter = Thread.currentThread();
# Line 2720 | Line 2807 | public class ConcurrentHashMap<K,V> impl
2807  
2808          /**
2809           * Returns matching node or null if none. Tries to search
2810 <         * using tree compareisons from root, but continues linear
2810 >         * using tree comparisons from root, but continues linear
2811           * search when lock not available.
2812           */
2813          final Node<K,V> find(int h, Object k) {
2814              if (k != null) {
2815 <                for (Node<K,V> e = first; e != null; e = e.next) {
2815 >                for (Node<K,V> e = first; e != null; ) {
2816                      int s; K ek;
2817                      if (((s = lockState) & (WAITER|WRITER)) != 0) {
2818                          if (e.hash == h &&
2819                              ((ek = e.key) == k || (ek != null && k.equals(ek))))
2820                              return e;
2821 +                        e = e.next;
2822                      }
2823                      else if (U.compareAndSwapInt(this, LOCKSTATE, s,
2824                                                   s + READER)) {
# Line 2757 | Line 2845 | public class ConcurrentHashMap<K,V> impl
2845           */
2846          final TreeNode<K,V> putTreeVal(int h, K k, V v) {
2847              Class<?> kc = null;
2848 +            boolean searched = false;
2849              for (TreeNode<K,V> p = root;;) {
2850 <                int dir, ph; K pk; TreeNode<K,V> q, pr;
2850 >                int dir, ph; K pk;
2851                  if (p == null) {
2852                      first = root = new TreeNode<K,V>(h, k, v, null, null);
2853                      break;
# Line 2772 | Line 2861 | public class ConcurrentHashMap<K,V> impl
2861                  else if ((kc == null &&
2862                            (kc = comparableClassFor(k)) == null) ||
2863                           (dir = compareComparables(kc, k, pk)) == 0) {
2864 <                    if (p.left == null)
2865 <                        dir = 1;
2866 <                    else if ((pr = p.right) == null ||
2867 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2868 <                        dir = -1;
2869 <                    else
2870 <                        return q;
2864 >                    if (!searched) {
2865 >                        TreeNode<K,V> q, ch;
2866 >                        searched = true;
2867 >                        if (((ch = p.left) != null &&
2868 >                             (q = ch.findTreeNode(h, k, kc)) != null) ||
2869 >                            ((ch = p.right) != null &&
2870 >                             (q = ch.findTreeNode(h, k, kc)) != null))
2871 >                            return q;
2872 >                    }
2873 >                    dir = tieBreakOrder(k, pk);
2874                  }
2875 +
2876                  TreeNode<K,V> xp = p;
2877 <                if ((p = (dir < 0) ? p.left : p.right) == null) {
2877 >                if ((p = (dir <= 0) ? p.left : p.right) == null) {
2878                      TreeNode<K,V> x, f = first;
2879                      first = x = new TreeNode<K,V>(h, k, v, f, xp);
2880                      if (f != null)
2881                          f.prev = x;
2882 <                    if (dir < 0)
2882 >                    if (dir <= 0)
2883                          xp.left = x;
2884                      else
2885                          xp.right = x;
# Line 2815 | Line 2908 | public class ConcurrentHashMap<K,V> impl
2908           * that are accessible independently of lock. So instead we
2909           * swap the tree linkages.
2910           *
2911 <         * @return true if now too small so should be untreeified.
2911 >         * @return true if now too small, so should be untreeified
2912           */
2913          final boolean removeTreeNode(TreeNode<K,V> p) {
2914              TreeNode<K,V> next = (TreeNode<K,V>)p.next;
# Line 3009 | Line 3102 | public class ConcurrentHashMap<K,V> impl
3102  
3103          static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root,
3104                                                     TreeNode<K,V> x) {
3105 <            for (TreeNode<K,V> xp, xpl, xpr;;)  {
3105 >            for (TreeNode<K,V> xp, xpl, xpr;;) {
3106                  if (x == null || x == root)
3107                      return root;
3108                  else if ((xp = x.parent) == null) {
# Line 3132 | Line 3225 | public class ConcurrentHashMap<K,V> impl
3225                  Class<?> k = TreeBin.class;
3226                  LOCKSTATE = U.objectFieldOffset
3227                      (k.getDeclaredField("lockState"));
3228 <            } catch (Exception e) {
3228 >            } catch (ReflectiveOperationException e) {
3229                  throw new Error(e);
3230              }
3231          }
# Line 3141 | Line 3234 | public class ConcurrentHashMap<K,V> impl
3234      /* ----------------Table Traversal -------------- */
3235  
3236      /**
3237 +     * Records the table, its length, and current traversal index for a
3238 +     * traverser that must process a region of a forwarded table before
3239 +     * proceeding with current table.
3240 +     */
3241 +    static final class TableStack<K,V> {
3242 +        int length;
3243 +        int index;
3244 +        Node<K,V>[] tab;
3245 +        TableStack<K,V> next;
3246 +    }
3247 +
3248 +    /**
3249       * Encapsulates traversal for methods such as containsValue; also
3250       * serves as a base class for other iterators and spliterators.
3251       *
# Line 3164 | Line 3269 | public class ConcurrentHashMap<K,V> impl
3269      static class Traverser<K,V> {
3270          Node<K,V>[] tab;        // current table; updated if resized
3271          Node<K,V> next;         // the next entry to use
3272 +        TableStack<K,V> stack, spare; // to save/restore on ForwardingNodes
3273          int index;              // index of bin to use next
3274          int baseIndex;          // current index of initial table
3275          int baseLimit;          // index bound for initial table
# Line 3185 | Line 3291 | public class ConcurrentHashMap<K,V> impl
3291              if ((e = next) != null)
3292                  e = e.next;
3293              for (;;) {
3294 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
3294 >                Node<K,V>[] t; int i, n;  // must use locals in checks
3295                  if (e != null)
3296                      return next = e;
3297                  if (baseIndex >= baseLimit || (t = tab) == null ||
3298                      (n = t.length) <= (i = index) || i < 0)
3299                      return next = null;
3300 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
3300 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
3301                      if (e instanceof ForwardingNode) {
3302                          tab = ((ForwardingNode<K,V>)e).nextTable;
3303                          e = null;
3304 +                        pushState(t, i, n);
3305                          continue;
3306                      }
3307                      else if (e instanceof TreeBin)
# Line 3202 | Line 3309 | public class ConcurrentHashMap<K,V> impl
3309                      else
3310                          e = null;
3311                  }
3312 <                if ((index += baseSize) >= n)
3313 <                    index = ++baseIndex;    // visit upper slots if present
3312 >                if (stack != null)
3313 >                    recoverState(n);
3314 >                else if ((index = i + baseSize) >= n)
3315 >                    index = ++baseIndex; // visit upper slots if present
3316              }
3317          }
3318 +
3319 +        /**
3320 +         * Saves traversal state upon encountering a forwarding node.
3321 +         */
3322 +        private void pushState(Node<K,V>[] t, int i, int n) {
3323 +            TableStack<K,V> s = spare;  // reuse if possible
3324 +            if (s != null)
3325 +                spare = s.next;
3326 +            else
3327 +                s = new TableStack<K,V>();
3328 +            s.tab = t;
3329 +            s.length = n;
3330 +            s.index = i;
3331 +            s.next = stack;
3332 +            stack = s;
3333 +        }
3334 +
3335 +        /**
3336 +         * Possibly pops traversal state.
3337 +         *
3338 +         * @param n length of current table
3339 +         */
3340 +        private void recoverState(int n) {
3341 +            TableStack<K,V> s; int len;
3342 +            while ((s = stack) != null && (index += (len = s.length)) >= n) {
3343 +                n = len;
3344 +                index = s.index;
3345 +                tab = s.tab;
3346 +                s.tab = null;
3347 +                TableStack<K,V> next = s.next;
3348 +                s.next = spare; // save for reuse
3349 +                stack = next;
3350 +                spare = s;
3351 +            }
3352 +            if (s == null && (index += baseSize) >= n)
3353 +                index = ++baseIndex;
3354 +        }
3355      }
3356  
3357      /**
3358       * Base of key, value, and entry Iterators. Adds fields to
3359 <     * Traverser to support iterator.remove
3359 >     * Traverser to support iterator.remove.
3360       */
3361      static class BaseIterator<K,V> extends Traverser<K,V> {
3362          final ConcurrentHashMap<K,V> map;
# Line 3498 | Line 3644 | public class ConcurrentHashMap<K,V> impl
3644       * for an element, or null if there is no transformation (in
3645       * which case the action is not applied)
3646       * @param action the action
3647 +     * @param <U> the return type of the transformer
3648       * @since 1.8
3649       */
3650      public <U> void forEach(long parallelismThreshold,
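The hunk above shows only the head of the transforming forEach; a minimal usage sketch, assuming a hypothetical map (class name, contents, and threshold are illustrative, not from this file):

    import java.util.concurrent.ConcurrentHashMap;

    public class BulkForEachDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            // parallelismThreshold = 1 requests parallel execution whenever possible;
            // Long.MAX_VALUE would force a single-threaded, in-caller traversal.
            map.forEach(1L,
                        (k, v) -> v > 1 ? k + "=" + v : null, // transformer; null skips the entry
                        System.out::println);                 // action applied to non-null results
        }
    }

The shorter snippets that follow reuse this hypothetical map rather than repeating the setup.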
# Line 3521 | Line 3668 | public class ConcurrentHashMap<K,V> impl
3668       * needed for this operation to be executed in parallel
3669       * @param searchFunction a function returning a non-null
3670       * result on success, else null
3671 +     * @param <U> the return type of the search function
3672       * @return a non-null result from applying the given search
3673       * function on each (key, value), or null if none
3674       * @since 1.8
# Line 3544 | Line 3692 | public class ConcurrentHashMap<K,V> impl
3692       * for an element, or null if there is no transformation (in
3693       * which case it is not combined)
3694       * @param reducer a commutative associative combining function
3695 +     * @param <U> the return type of the transformer
3696       * @return the result of accumulating the given transformation
3697       * of all (key, value) pairs
3698       * @since 1.8
# Line 3573 | Line 3722 | public class ConcurrentHashMap<K,V> impl
3722       * of all (key, value) pairs
3723       * @since 1.8
3724       */
3725 <    public double reduceToDoubleIn(long parallelismThreshold,
3726 <                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3727 <                                   double basis,
3728 <                                   DoubleBinaryOperator reducer) {
3725 >    public double reduceToDouble(long parallelismThreshold,
3726 >                                 ToDoubleBiFunction<? super K, ? super V> transformer,
3727 >                                 double basis,
3728 >                                 DoubleBinaryOperator reducer) {
3729          if (transformer == null || reducer == null)
3730              throw new NullPointerException();
3731          return new MapReduceMappingsToDoubleTask<K,V>
# Line 3662 | Line 3811 | public class ConcurrentHashMap<K,V> impl
3811       * for an element, or null if there is no transformation (in
3812       * which case the action is not applied)
3813       * @param action the action
3814 +     * @param <U> the return type of the transformer
3815       * @since 1.8
3816       */
3817      public <U> void forEachKey(long parallelismThreshold,
# Line 3685 | Line 3835 | public class ConcurrentHashMap<K,V> impl
3835       * needed for this operation to be executed in parallel
3836       * @param searchFunction a function returning a non-null
3837       * result on success, else null
3838 +     * @param <U> the return type of the search function
3839       * @return a non-null result from applying the given search
3840       * function on each key, or null if none
3841       * @since 1.8
# Line 3727 | Line 3878 | public class ConcurrentHashMap<K,V> impl
3878       * for an element, or null if there is no transformation (in
3879       * which case it is not combined)
3880       * @param reducer a commutative associative combining function
3881 +     * @param <U> the return type of the transformer
3882       * @return the result of accumulating the given transformation
3883       * of all keys
3884       * @since 1.8
# Line 3846 | Line 3998 | public class ConcurrentHashMap<K,V> impl
3998       * for an element, or null if there is no transformation (in
3999       * which case the action is not applied)
4000       * @param action the action
4001 +     * @param <U> the return type of the transformer
4002       * @since 1.8
4003       */
4004      public <U> void forEachValue(long parallelismThreshold,
# Line 3869 | Line 4022 | public class ConcurrentHashMap<K,V> impl
4022       * needed for this operation to be executed in parallel
4023       * @param searchFunction a function returning a non-null
4024       * result on success, else null
4025 +     * @param <U> the return type of the search function
4026       * @return a non-null result from applying the given search
4027       * function on each value, or null if none
4028       * @since 1.8
# Line 3910 | Line 4064 | public class ConcurrentHashMap<K,V> impl
4064       * for an element, or null if there is no transformation (in
4065       * which case it is not combined)
4066       * @param reducer a commutative associative combining function
4067 +     * @param <U> the return type of the transformer
4068       * @return the result of accumulating the given transformation
4069       * of all values
4070       * @since 1.8
# Line 4027 | Line 4182 | public class ConcurrentHashMap<K,V> impl
4182       * for an element, or null if there is no transformation (in
4183       * which case the action is not applied)
4184       * @param action the action
4185 +     * @param <U> the return type of the transformer
4186       * @since 1.8
4187       */
4188      public <U> void forEachEntry(long parallelismThreshold,
# Line 4050 | Line 4206 | public class ConcurrentHashMap<K,V> impl
4206       * needed for this operation to be executed in parallel
4207       * @param searchFunction a function returning a non-null
4208       * result on success, else null
4209 +     * @param <U> the return type of the search function
4210       * @return a non-null result from applying the given search
4211       * function on each entry, or null if none
4212       * @since 1.8
# Line 4091 | Line 4248 | public class ConcurrentHashMap<K,V> impl
4248       * for an element, or null if there is no transformation (in
4249       * which case it is not combined)
4250       * @param reducer a commutative associative combining function
4251 +     * @param <U> the return type of the transformer
4252       * @return the result of accumulating the given transformation
4253       * of all entries
4254       * @since 1.8
# Line 4213 | Line 4371 | public class ConcurrentHashMap<K,V> impl
4371          // implementations below rely on concrete classes supplying these
4372          // abstract methods
4373          /**
4374 <         * Returns a "weakly consistent" iterator that will never
4375 <         * throw {@link ConcurrentModificationException}, and
4376 <         * guarantees to traverse elements as they existed upon
4377 <         * construction of the iterator, and may (but is not
4378 <         * guaranteed to) reflect any modifications subsequent to
4379 <         * construction.
4374 >         * Returns an iterator over the elements in this collection.
4375 >         *
4376 >         * <p>The returned iterator is
4377 >         * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
4378 >         *
4379 >         * @return an iterator over the elements in this collection
4380           */
4381          public abstract Iterator<E> iterator();
4382          public abstract boolean contains(Object o);
# Line 4316 | Line 4474 | public class ConcurrentHashMap<K,V> impl
4474          }
4475  
4476          public final boolean removeAll(Collection<?> c) {
4477 +            if (c == null) throw new NullPointerException();
4478              boolean modified = false;
4479              for (Iterator<E> it = iterator(); it.hasNext();) {
4480                  if (c.contains(it.next())) {
# Line 4327 | Line 4486 | public class ConcurrentHashMap<K,V> impl
4486          }
4487  
4488          public final boolean retainAll(Collection<?> c) {
4489 +            if (c == null) throw new NullPointerException();
4490              boolean modified = false;
4491              for (Iterator<E> it = iterator(); it.hasNext();) {
4492                  if (!c.contains(it.next())) {
# Line 4621 | Line 4781 | public class ConcurrentHashMap<K,V> impl
4781       * Base class for bulk tasks. Repeats some fields and code from
4782       * class Traverser, because we need to subclass CountedCompleter.
4783       */
4784 +    @SuppressWarnings("serial")
4785      abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
4786          Node<K,V>[] tab;        // same as Traverser
4787          Node<K,V> next;
4788 +        TableStack<K,V> stack, spare;
4789          int index;
4790          int baseIndex;
4791          int baseLimit;
# Line 4652 | Line 4814 | public class ConcurrentHashMap<K,V> impl
4814              if ((e = next) != null)
4815                  e = e.next;
4816              for (;;) {
4817 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
4817 >                Node<K,V>[] t; int i, n;
4818                  if (e != null)
4819                      return next = e;
4820                  if (baseIndex >= baseLimit || (t = tab) == null ||
4821                      (n = t.length) <= (i = index) || i < 0)
4822                      return next = null;
4823 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4823 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
4824                      if (e instanceof ForwardingNode) {
4825                          tab = ((ForwardingNode<K,V>)e).nextTable;
4826                          e = null;
4827 +                        pushState(t, i, n);
4828                          continue;
4829                      }
4830                      else if (e instanceof TreeBin)
# Line 4669 | Line 4832 | public class ConcurrentHashMap<K,V> impl
4832                      else
4833                          e = null;
4834                  }
4835 <                if ((index += baseSize) >= n)
4836 <                    index = ++baseIndex;    // visit upper slots if present
4835 >                if (stack != null)
4836 >                    recoverState(n);
4837 >                else if ((index = i + baseSize) >= n)
4838 >                    index = ++baseIndex;
4839 >            }
4840 >        }
4841 >
4842 >        private void pushState(Node<K,V>[] t, int i, int n) {
4843 >            TableStack<K,V> s = spare;
4844 >            if (s != null)
4845 >                spare = s.next;
4846 >            else
4847 >                s = new TableStack<K,V>();
4848 >            s.tab = t;
4849 >            s.length = n;
4850 >            s.index = i;
4851 >            s.next = stack;
4852 >            stack = s;
4853 >        }
4854 >
4855 >        private void recoverState(int n) {
4856 >            TableStack<K,V> s; int len;
4857 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
4858 >                n = len;
4859 >                index = s.index;
4860 >                tab = s.tab;
4861 >                s.tab = null;
4862 >                TableStack<K,V> next = s.next;
4863 >                s.next = spare; // save for reuse
4864 >                stack = next;
4865 >                spare = s;
4866              }
4867 +            if (s == null && (index += baseSize) >= n)
4868 +                index = ++baseIndex;
4869          }
4870      }
4871  
# Line 5131 | Line 5325 | public class ConcurrentHashMap<K,V> impl
5325                  result = r;
5326                  CountedCompleter<?> c;
5327                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5328 <                    @SuppressWarnings("unchecked") ReduceKeysTask<K,V>
5328 >                    @SuppressWarnings("unchecked")
5329 >                    ReduceKeysTask<K,V>
5330                          t = (ReduceKeysTask<K,V>)c,
5331                          s = t.rights;
5332                      while (s != null) {
# Line 5178 | Line 5373 | public class ConcurrentHashMap<K,V> impl
5373                  result = r;
5374                  CountedCompleter<?> c;
5375                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5376 <                    @SuppressWarnings("unchecked") ReduceValuesTask<K,V>
5376 >                    @SuppressWarnings("unchecked")
5377 >                    ReduceValuesTask<K,V>
5378                          t = (ReduceValuesTask<K,V>)c,
5379                          s = t.rights;
5380                      while (s != null) {
# Line 5223 | Line 5419 | public class ConcurrentHashMap<K,V> impl
5419                  result = r;
5420                  CountedCompleter<?> c;
5421                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5422 <                    @SuppressWarnings("unchecked") ReduceEntriesTask<K,V>
5422 >                    @SuppressWarnings("unchecked")
5423 >                    ReduceEntriesTask<K,V>
5424                          t = (ReduceEntriesTask<K,V>)c,
5425                          s = t.rights;
5426                      while (s != null) {
# Line 5276 | Line 5473 | public class ConcurrentHashMap<K,V> impl
5473                  result = r;
5474                  CountedCompleter<?> c;
5475                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5476 <                    @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U>
5476 >                    @SuppressWarnings("unchecked")
5477 >                    MapReduceKeysTask<K,V,U>
5478                          t = (MapReduceKeysTask<K,V,U>)c,
5479                          s = t.rights;
5480                      while (s != null) {
# Line 5329 | Line 5527 | public class ConcurrentHashMap<K,V> impl
5527                  result = r;
5528                  CountedCompleter<?> c;
5529                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5530 <                    @SuppressWarnings("unchecked") MapReduceValuesTask<K,V,U>
5530 >                    @SuppressWarnings("unchecked")
5531 >                    MapReduceValuesTask<K,V,U>
5532                          t = (MapReduceValuesTask<K,V,U>)c,
5533                          s = t.rights;
5534                      while (s != null) {
# Line 5382 | Line 5581 | public class ConcurrentHashMap<K,V> impl
5581                  result = r;
5582                  CountedCompleter<?> c;
5583                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5584 <                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
5584 >                    @SuppressWarnings("unchecked")
5585 >                    MapReduceEntriesTask<K,V,U>
5586                          t = (MapReduceEntriesTask<K,V,U>)c,
5587                          s = t.rights;
5588                      while (s != null) {
# Line 5435 | Line 5635 | public class ConcurrentHashMap<K,V> impl
5635                  result = r;
5636                  CountedCompleter<?> c;
5637                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5638 <                    @SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
5638 >                    @SuppressWarnings("unchecked")
5639 >                    MapReduceMappingsTask<K,V,U>
5640                          t = (MapReduceMappingsTask<K,V,U>)c,
5641                          s = t.rights;
5642                      while (s != null) {
# Line 5487 | Line 5688 | public class ConcurrentHashMap<K,V> impl
5688                  result = r;
5689                  CountedCompleter<?> c;
5690                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5691 <                    @SuppressWarnings("unchecked") MapReduceKeysToDoubleTask<K,V>
5691 >                    @SuppressWarnings("unchecked")
5692 >                    MapReduceKeysToDoubleTask<K,V>
5693                          t = (MapReduceKeysToDoubleTask<K,V>)c,
5694                          s = t.rights;
5695                      while (s != null) {
# Line 5536 | Line 5738 | public class ConcurrentHashMap<K,V> impl
5738                  result = r;
5739                  CountedCompleter<?> c;
5740                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5741 <                    @SuppressWarnings("unchecked") MapReduceValuesToDoubleTask<K,V>
5741 >                    @SuppressWarnings("unchecked")
5742 >                    MapReduceValuesToDoubleTask<K,V>
5743                          t = (MapReduceValuesToDoubleTask<K,V>)c,
5744                          s = t.rights;
5745                      while (s != null) {
# Line 5585 | Line 5788 | public class ConcurrentHashMap<K,V> impl
5788                  result = r;
5789                  CountedCompleter<?> c;
5790                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5791 <                    @SuppressWarnings("unchecked") MapReduceEntriesToDoubleTask<K,V>
5791 >                    @SuppressWarnings("unchecked")
5792 >                    MapReduceEntriesToDoubleTask<K,V>
5793                          t = (MapReduceEntriesToDoubleTask<K,V>)c,
5794                          s = t.rights;
5795                      while (s != null) {
# Line 5634 | Line 5838 | public class ConcurrentHashMap<K,V> impl
5838                  result = r;
5839                  CountedCompleter<?> c;
5840                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5841 <                    @SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
5841 >                    @SuppressWarnings("unchecked")
5842 >                    MapReduceMappingsToDoubleTask<K,V>
5843                          t = (MapReduceMappingsToDoubleTask<K,V>)c,
5844                          s = t.rights;
5845                      while (s != null) {
# Line 5683 | Line 5888 | public class ConcurrentHashMap<K,V> impl
5888                  result = r;
5889                  CountedCompleter<?> c;
5890                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5891 <                    @SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
5891 >                    @SuppressWarnings("unchecked")
5892 >                    MapReduceKeysToLongTask<K,V>
5893                          t = (MapReduceKeysToLongTask<K,V>)c,
5894                          s = t.rights;
5895                      while (s != null) {
# Line 5732 | Line 5938 | public class ConcurrentHashMap<K,V> impl
5938                  result = r;
5939                  CountedCompleter<?> c;
5940                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5941 <                    @SuppressWarnings("unchecked") MapReduceValuesToLongTask<K,V>
5941 >                    @SuppressWarnings("unchecked")
5942 >                    MapReduceValuesToLongTask<K,V>
5943                          t = (MapReduceValuesToLongTask<K,V>)c,
5944                          s = t.rights;
5945                      while (s != null) {
# Line 5781 | Line 5988 | public class ConcurrentHashMap<K,V> impl
5988                  result = r;
5989                  CountedCompleter<?> c;
5990                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5991 <                    @SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
5991 >                    @SuppressWarnings("unchecked")
5992 >                    MapReduceEntriesToLongTask<K,V>
5993                          t = (MapReduceEntriesToLongTask<K,V>)c,
5994                          s = t.rights;
5995                      while (s != null) {
# Line 5830 | Line 6038 | public class ConcurrentHashMap<K,V> impl
6038                  result = r;
6039                  CountedCompleter<?> c;
6040                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6041 <                    @SuppressWarnings("unchecked") MapReduceMappingsToLongTask<K,V>
6041 >                    @SuppressWarnings("unchecked")
6042 >                    MapReduceMappingsToLongTask<K,V>
6043                          t = (MapReduceMappingsToLongTask<K,V>)c,
6044                          s = t.rights;
6045                      while (s != null) {
# Line 5879 | Line 6088 | public class ConcurrentHashMap<K,V> impl
6088                  result = r;
6089                  CountedCompleter<?> c;
6090                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6091 <                    @SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
6091 >                    @SuppressWarnings("unchecked")
6092 >                    MapReduceKeysToIntTask<K,V>
6093                          t = (MapReduceKeysToIntTask<K,V>)c,
6094                          s = t.rights;
6095                      while (s != null) {
# Line 5928 | Line 6138 | public class ConcurrentHashMap<K,V> impl
6138                  result = r;
6139                  CountedCompleter<?> c;
6140                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6141 <                    @SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
6141 >                    @SuppressWarnings("unchecked")
6142 >                    MapReduceValuesToIntTask<K,V>
6143                          t = (MapReduceValuesToIntTask<K,V>)c,
6144                          s = t.rights;
6145                      while (s != null) {
# Line 5977 | Line 6188 | public class ConcurrentHashMap<K,V> impl
6188                  result = r;
6189                  CountedCompleter<?> c;
6190                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6191 <                    @SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
6191 >                    @SuppressWarnings("unchecked")
6192 >                    MapReduceEntriesToIntTask<K,V>
6193                          t = (MapReduceEntriesToIntTask<K,V>)c,
6194                          s = t.rights;
6195                      while (s != null) {
# Line 6026 | Line 6238 | public class ConcurrentHashMap<K,V> impl
6238                  result = r;
6239                  CountedCompleter<?> c;
6240                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6241 <                    @SuppressWarnings("unchecked") MapReduceMappingsToIntTask<K,V>
6241 >                    @SuppressWarnings("unchecked")
6242 >                    MapReduceMappingsToIntTask<K,V>
6243                          t = (MapReduceMappingsToIntTask<K,V>)c,
6244                          s = t.rights;
6245                      while (s != null) {
# Line 6042 | Line 6255 | public class ConcurrentHashMap<K,V> impl
6255      private static final sun.misc.Unsafe U;
6256      private static final long SIZECTL;
6257      private static final long TRANSFERINDEX;
6045    private static final long TRANSFERORIGIN;
6258      private static final long BASECOUNT;
6259      private static final long CELLSBUSY;
6260      private static final long CELLVALUE;
# Line 6057 | Line 6269 | public class ConcurrentHashMap<K,V> impl
6269                  (k.getDeclaredField("sizeCtl"));
6270              TRANSFERINDEX = U.objectFieldOffset
6271                  (k.getDeclaredField("transferIndex"));
6060            TRANSFERORIGIN = U.objectFieldOffset
6061                (k.getDeclaredField("transferOrigin"));
6272              BASECOUNT = U.objectFieldOffset
6273                  (k.getDeclaredField("baseCount"));
6274              CELLSBUSY = U.objectFieldOffset
# Line 6070 | Line 6280 | public class ConcurrentHashMap<K,V> impl
6280              ABASE = U.arrayBaseOffset(ak);
6281              int scale = U.arrayIndexScale(ak);
6282              if ((scale & (scale - 1)) != 0)
6283 <                throw new Error("data type scale not a power of two");
6283 >                throw new Error("array index scale not a power of two");
6284              ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6285 <        } catch (Exception e) {
6285 >        } catch (ReflectiveOperationException e) {
6286              throw new Error(e);
6287          }
6288      }
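For readers puzzling over the ASHIFT computation near the end of the static initializer: it turns the per-element index scale into a shift, so an element offset can be computed as ABASE + (i << ASHIFT). A self-contained arithmetic sketch (the scale and base values are hypothetical, not queried from Unsafe):

    public class AshiftDemo {
        public static void main(String[] args) {
            int scale = 4;                                          // e.g. a 4-byte reference slot
            int ashift = 31 - Integer.numberOfLeadingZeros(scale);  // = 2
            long abase = 16;                                        // hypothetical array base offset
            int i = 5;
            System.out.println("offset = " + (abase + ((long) i << ashift))); // 16 + 20 = 36
        }
    }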
