
Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.228 by jsr166, Tue Jun 18 18:39:14 2013 UTC vs.
Revision 1.265 by jsr166, Tue Feb 17 20:03:22 2015 UTC

# Line 10 | Line 10 | import java.io.ObjectStreamField;
10   import java.io.Serializable;
11   import java.lang.reflect.ParameterizedType;
12   import java.lang.reflect.Type;
13 + import java.util.AbstractMap;
14   import java.util.Arrays;
15   import java.util.Collection;
15 import java.util.Comparator;
16 import java.util.ConcurrentModificationException;
16   import java.util.Enumeration;
17   import java.util.HashMap;
18   import java.util.Hashtable;
# Line 22 | Line 21 | import java.util.Map;
21   import java.util.NoSuchElementException;
22   import java.util.Set;
23   import java.util.Spliterator;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.ForkJoinPool;
24   import java.util.concurrent.atomic.AtomicReference;
25   import java.util.concurrent.locks.LockSupport;
26   import java.util.concurrent.locks.ReentrantLock;
27   import java.util.function.BiConsumer;
28   import java.util.function.BiFunction;
32 import java.util.function.BinaryOperator;
29   import java.util.function.Consumer;
30   import java.util.function.DoubleBinaryOperator;
31   import java.util.function.Function;
# Line 64 | Line 60 | import java.util.stream.Stream;
60   * that key reporting the updated value.)  For aggregate operations
61   * such as {@code putAll} and {@code clear}, concurrent retrievals may
62   * reflect insertion or removal of only some entries.  Similarly,
63 < * Iterators and Enumerations return elements reflecting the state of
64 < * the hash table at some point at or since the creation of the
63 > * Iterators, Spliterators and Enumerations return elements reflecting the
64 > * state of the hash table at some point at or since the creation of the
65   * iterator/enumeration.  They do <em>not</em> throw {@link
66 < * ConcurrentModificationException}.  However, iterators are designed
67 < * to be used by only one thread at a time.  Bear in mind that the
68 < * results of aggregate status methods including {@code size}, {@code
69 < * isEmpty}, and {@code containsValue} are typically useful only when
70 < * a map is not undergoing concurrent updates in other threads.
66 > * java.util.ConcurrentModificationException ConcurrentModificationException}.
67 > * However, iterators are designed to be used by only one thread at a time.
68 > * Bear in mind that the results of aggregate status methods including
69 > * {@code size}, {@code isEmpty}, and {@code containsValue} are typically
70 > * useful only when a map is not undergoing concurrent updates in other threads.
71   * Otherwise the results of these methods reflect transient states
72   * that may be adequate for monitoring or estimation purposes, but not
73   * for program control.
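
A minimal sketch of the behavior described above, with illustrative class and key names: iteration proceeds while another thread updates the map, no ConcurrentModificationException is thrown, and size() is treated only as an estimate.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class WeaklyConsistentDemo {
        public static void main(String[] args) throws InterruptedException {
            ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 1000; i++)
                map.put(i, "v" + i);

            // Keep mutating while the main thread iterates; the iterator is
            // weakly consistent and never throws ConcurrentModificationException.
            Thread writer = new Thread(() -> {
                for (int i = 1000; i < 2000; i++)
                    map.put(i, "v" + i);
            });
            writer.start();

            int seen = 0;
            for (Map.Entry<Integer, String> e : map.entrySet())
                seen++;                  // may or may not include the new entries

            writer.join();
            // Per the javadoc above, size() under concurrent updates is only an
            // estimate suitable for monitoring, not program control.
            System.out.println("iterated " + seen + ", size now " + map.size());
        }
    }
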
# Line 104 | Line 100 | import java.util.stream.Stream;
100   * mapped values are (perhaps transiently) not used or all take the
101   * same mapping value.
102   *
103 < * <p>A ConcurrentHashMap can be used as scalable frequency map (a
103 > * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
104   * form of histogram or multiset) by using {@link
105   * java.util.concurrent.atomic.LongAdder} values and initializing via
106   * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
107   * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
108 < * {@code freqs.computeIfAbsent(k -> new LongAdder()).increment();}
108 > * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
109   *
110   * <p>This class and its views and iterators implement all of the
111   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
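
A minimal sketch of the frequency-map idiom shown in the javadoc above; the class name and sample words are illustrative only.

    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.LongAdder;

    public class FrequencyMapDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, LongAdder> freqs = new ConcurrentHashMap<>();
            List<String> words = Arrays.asList("a", "b", "a", "c", "a");
            // computeIfAbsent installs one LongAdder per key; increment() is then
            // a cheap, contention-friendly update even from many threads.
            words.parallelStream()
                 .forEach(w -> freqs.computeIfAbsent(w, k -> new LongAdder()).increment());
            System.out.println(freqs.get("a").sum());   // 3
        }
    }
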
# Line 131 | Line 127 | import java.util.stream.Stream;
127   * of supplied functions should not depend on any ordering, or on any
128   * other objects or values that may transiently change while
129   * computation is in progress; and except for forEach actions, should
130 < * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
130 > * ideally be side-effect-free. Bulk operations on {@link java.util.Map.Entry}
131   * objects do not support method {@code setValue}.
132   *
133   * <ul>
# Line 235 | Line 231 | import java.util.stream.Stream;
231   * @param <K> the type of keys maintained by this map
232   * @param <V> the type of mapped values
233   */
234 < public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
234 > public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
235 >    implements ConcurrentMap<K,V>, Serializable {
236      private static final long serialVersionUID = 7249069246763182397L;
237  
238      /*
# Line 262 | Line 259 | public class ConcurrentHashMap<K,V> impl
259       * because they have negative hash fields and null key and value
260       * fields. (These special nodes are either uncommon or transient,
261       * so the impact of carrying around some unused fields is
262 <     * insignficant.)
262 >     * insignificant.)
263       *
264       * The table is lazily initialized to a power-of-two size upon the
265       * first insertion.  Each bin in the table normally contains a
# Line 343 | Line 340 | public class ConcurrentHashMap<K,V> impl
340       * The table is resized when occupancy exceeds a percentage
341       * threshold (nominally, 0.75, but see below).  Any thread
342       * noticing an overfull bin may assist in resizing after the
343 <     * initiating thread allocates and sets up the replacement
344 <     * array. However, rather than stalling, these other threads may
345 <     * proceed with insertions etc.  The use of TreeBins shields us
346 <     * from the worst case effects of overfilling while resizes are in
343 >     * initiating thread allocates and sets up the replacement array.
344 >     * However, rather than stalling, these other threads may proceed
345 >     * with insertions etc.  The use of TreeBins shields us from the
346 >     * worst case effects of overfilling while resizes are in
347       * progress.  Resizing proceeds by transferring bins, one by one,
348 <     * from the table to the next table. To enable concurrency, the
349 <     * next table must be (incrementally) prefilled with place-holders
350 <     * serving as reverse forwarders to the old table.  Because we are
348 >     * from the table to the next table. However, threads claim small
349 >     * blocks of indices to transfer (via field transferIndex) before
350 >     * doing so, reducing contention.  A generation stamp in field
351 >     * sizeCtl ensures that resizings do not overlap. Because we are
352       * using power-of-two expansion, the elements from each bin must
353       * either stay at same index, or move with a power of two
354       * offset. We eliminate unnecessary node creation by catching
# Line 371 | Line 369 | public class ConcurrentHashMap<K,V> impl
369       * locks, average aggregate waits become shorter as resizing
370       * progresses.  The transfer operation must also ensure that all
371       * accessible bins in both the old and new table are usable by any
372 <     * traversal.  This is arranged by proceeding from the last bin
373 <     * (table.length - 1) up towards the first.  Upon seeing a
374 <     * forwarding node, traversals (see class Traverser) arrange to
375 <     * move to the new table without revisiting nodes.  However, to
376 <     * ensure that no intervening nodes are skipped, bin splitting can
377 <     * only begin after the associated reverse-forwarders are in
378 <     * place.
372 >     * traversal.  This is arranged in part by proceeding from the
373 >     * last bin (table.length - 1) up towards the first.  Upon seeing
374 >     * a forwarding node, traversals (see class Traverser) arrange to
375 >     * move to the new table without revisiting nodes.  To ensure that
376 >     * no intervening nodes are skipped even when moved out of order,
377 >     * a stack (see class TableStack) is created on first encounter of
378 >     * a forwarding node during a traversal, to maintain its place if
379 >     * later processing the current table. The need for these
380 >     * save/restore mechanics is relatively rare, but when one
381 >     * forwarding node is encountered, typically many more will be.
382 >     * So Traversers use a simple caching scheme to avoid creating so
383 >     * many new TableStack nodes. (Thanks to Peter Levart for
384 >     * suggesting use of a stack here.)
385       *
386       * The traversal scheme also applies to partial traversals of
387       * ranges of bins (via an alternate Traverser constructor)
# Line 409 | Line 413 | public class ConcurrentHashMap<K,V> impl
413       * related operations (which is the main reason we cannot use
414       * existing collections such as TreeMaps). TreeBins contain
415       * Comparable elements, but may contain others, as well as
416 <     * elements that are Comparable but not necessarily Comparable
417 <     * for the same T, so we cannot invoke compareTo among them. To
418 <     * handle this, the tree is ordered primarily by hash value, then
419 <     * by Comparable.compareTo order if applicable.  On lookup at a
420 <     * node, if elements are not comparable or compare as 0 then both
421 <     * left and right children may need to be searched in the case of
422 <     * tied hash values. (This corresponds to the full list search
423 <     * that would be necessary if all elements were non-Comparable and
424 <     * had tied hashes.)  The red-black balancing code is updated from
425 <     * pre-jdk-collections
416 >     * elements that are Comparable but not necessarily Comparable for
417 >     * the same T, so we cannot invoke compareTo among them. To handle
418 >     * this, the tree is ordered primarily by hash value, then by
419 >     * Comparable.compareTo order if applicable.  On lookup at a node,
420 >     * if elements are not comparable or compare as 0 then both left
421 >     * and right children may need to be searched in the case of tied
422 >     * hash values. (This corresponds to the full list search that
423 >     * would be necessary if all elements were non-Comparable and had
424 >     * tied hashes.) On insertion, to keep a total ordering (or as
425 >     * close as is required here) across rebalancings, we compare
426 >     * classes and identityHashCodes as tie-breakers. The red-black
427 >     * balancing code is updated from pre-jdk-collections
428       * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
429       * based in turn on Cormen, Leiserson, and Rivest "Introduction to
430       * Algorithms" (CLR).
431       *
432       * TreeBins also require an additional locking mechanism.  While
433       * list traversal is always possible by readers even during
434 <     * updates, tree traversal is not, mainly beause of tree-rotations
434 >     * updates, tree traversal is not, mainly because of tree-rotations
435       * that may change the root node and/or its linkages.  TreeBins
436       * include a simple read-write lock mechanism parasitic on the
437       * main bin-synchronization strategy: Structural adjustments
# Line 448 | Line 454 | public class ConcurrentHashMap<K,V> impl
454       * unused "Segment" class that is instantiated in minimal form
455       * only when serializing.
456       *
457 +     * Also, solely for compatibility with previous versions of this
458 +     * class, it extends AbstractMap, even though all of its methods
459 +     * are overridden, so it is just useless baggage.
460 +     *
461       * This file is organized to make things a little easier to follow
462       * while reading than they might otherwise: First the main static
463       * declarations and utilities, then fields, then main public
# Line 528 | Line 538 | public class ConcurrentHashMap<K,V> impl
538       */
539      private static final int MIN_TRANSFER_STRIDE = 16;
540  
541 +    /**
542 +     * The number of bits used for generation stamp in sizeCtl.
543 +     * Must be at least 6 for 32bit arrays.
544 +     */
545 +    private static int RESIZE_STAMP_BITS = 16;
546 +
547 +    /**
548 +     * The maximum number of threads that can help resize.
549 +     * Must fit in 32 - RESIZE_STAMP_BITS bits.
550 +     */
551 +    private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
552 +
553 +    /**
554 +     * The bit shift for recording size stamp in sizeCtl.
555 +     */
556 +    private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
557 +
558      /*
559       * Encodings for Node hash fields. See above for explanation.
560       */
561 <    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
562 <    static final int TREEBIN   = 0x80000000; // hash for heads of treea
563 <    static final int RESERVED  = 0x80000001; // hash for transient reservations
561 >    static final int MOVED     = -1; // hash for forwarding nodes
562 >    static final int TREEBIN   = -2; // hash for roots of trees
563 >    static final int RESERVED  = -3; // hash for transient reservations
564      static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
565  
566      /** Number of CPUS, to place bounds on some sizings */
# Line 552 | Line 579 | public class ConcurrentHashMap<K,V> impl
579       * Key-value entry.  This class is never exported out as a
580       * user-mutable Map.Entry (i.e., one supporting setValue; see
581       * MapEntry below), but can be used for read-only traversals used
582 <     * in bulk tasks.  Subclasses of Node with a negativehash field
582 >     * in bulk tasks.  Subclasses of Node with a negative hash field
583       * are special, and contain null keys and values (but are never
584       * exported).  Otherwise, keys and vals are never null.
585       */
# Line 560 | Line 587 | public class ConcurrentHashMap<K,V> impl
587          final int hash;
588          final K key;
589          volatile V val;
590 <        Node<K,V> next;
590 >        volatile Node<K,V> next;
591  
592          Node(int hash, K key, V val, Node<K,V> next) {
593              this.hash = hash;
# Line 569 | Line 596 | public class ConcurrentHashMap<K,V> impl
596              this.next = next;
597          }
598  
599 <        public final K getKey()       { return key; }
600 <        public final V getValue()     { return val; }
601 <        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
602 <        public final String toString(){ return key + "=" + val; }
599 >        public final K getKey()        { return key; }
600 >        public final V getValue()      { return val; }
601 >        public final String toString() { return key + "=" + val; }
602 >        public final int hashCode() {
603 >            return key.hashCode() ^ val.hashCode();
604 >        }
605          public final V setValue(V value) {
606              throw new UnsupportedOperationException();
607          }
# Line 685 | Line 714 | public class ConcurrentHashMap<K,V> impl
714       * errors by users, these checks must operate on local variables,
715       * which accounts for some odd-looking inline assignments below.
716       * Note that calls to setTabAt always occur within locked regions,
717 <     * and so do not need full volatile semantics, but still require
718 <     * ordering to maintain concurrent readability.
717 >     * and so in principle require only release ordering, not
718 >     * full volatile semantics, but are currently coded as volatile
719 >     * writes to be conservative.
720       */
721  
722      @SuppressWarnings("unchecked")
# Line 700 | Line 730 | public class ConcurrentHashMap<K,V> impl
730      }
731  
732      static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
733 <        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
733 >        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
734      }
735  
736      /* ---------------- Fields -------------- */
# Line 739 | Line 769 | public class ConcurrentHashMap<K,V> impl
769      private transient volatile int transferIndex;
770  
771      /**
742     * The least available table index to split while resizing.
743     */
744    private transient volatile int transferOrigin;
745
746    /**
772       * Spinlock (locked via CAS) used when resizing and/or creating CounterCells.
773       */
774      private transient volatile int cellsBusy;
# Line 1000 | Line 1025 | public class ConcurrentHashMap<K,V> impl
1025                                      p.val = value;
1026                              }
1027                          }
1028 +                        else if (f instanceof ReservationNode)
1029 +                            throw new IllegalStateException("Recursive update");
1030                      }
1031                  }
1032                  if (binCount != 0) {
# Line 1102 | Line 1129 | public class ConcurrentHashMap<K,V> impl
1129                                  }
1130                              }
1131                          }
1132 +                        else if (f instanceof ReservationNode)
1133 +                            throw new IllegalStateException("Recursive update");
1134                      }
1135                  }
1136                  if (validated) {
# Line 1162 | Line 1191 | public class ConcurrentHashMap<K,V> impl
1191       * operations.  It does not support the {@code add} or
1192       * {@code addAll} operations.
1193       *
1194 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1195 <     * that will never throw {@link ConcurrentModificationException},
1196 <     * and guarantees to traverse elements as they existed upon
1197 <     * construction of the iterator, and may (but is not guaranteed to)
1198 <     * reflect any modifications subsequent to construction.
1194 >     * <p>The view's iterators and spliterators are
1195 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1196 >     *
1197 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1198 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1199       *
1200       * @return the set view
1201       */
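
The spliterator characteristics documented for this view can be observed directly; a small sketch with illustrative names:

    import java.util.Spliterator;
    import java.util.concurrent.ConcurrentHashMap;

    public class KeySetSpliteratorDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("x", 1);
            Spliterator<String> sp = map.keySet().spliterator();
            // Reports CONCURRENT, DISTINCT and NONNULL, as stated above.
            System.out.println(sp.hasCharacteristics(Spliterator.CONCURRENT));  // true
            System.out.println(sp.hasCharacteristics(Spliterator.DISTINCT));    // true
            System.out.println(sp.hasCharacteristics(Spliterator.NONNULL));     // true
        }
    }
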
# Line 1185 | Line 1214 | public class ConcurrentHashMap<K,V> impl
1214       * {@code retainAll}, and {@code clear} operations.  It does not
1215       * support the {@code add} or {@code addAll} operations.
1216       *
1217 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1218 <     * that will never throw {@link ConcurrentModificationException},
1219 <     * and guarantees to traverse elements as they existed upon
1220 <     * construction of the iterator, and may (but is not guaranteed to)
1221 <     * reflect any modifications subsequent to construction.
1217 >     * <p>The view's iterators and spliterators are
1218 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1219 >     *
1220 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}
1221 >     * and {@link Spliterator#NONNULL}.
1222       *
1223       * @return the collection view
1224       */
# Line 1207 | Line 1236 | public class ConcurrentHashMap<K,V> impl
1236       * {@code removeAll}, {@code retainAll}, and {@code clear}
1237       * operations.
1238       *
1239 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1240 <     * that will never throw {@link ConcurrentModificationException},
1241 <     * and guarantees to traverse elements as they existed upon
1242 <     * construction of the iterator, and may (but is not guaranteed to)
1243 <     * reflect any modifications subsequent to construction.
1239 >     * <p>The view's iterators and spliterators are
1240 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1241 >     *
1242 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1243 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1244       *
1245       * @return the set view
1246       */
# Line 1321 | Line 1350 | public class ConcurrentHashMap<K,V> impl
1350       * Saves the state of the {@code ConcurrentHashMap} instance to a
1351       * stream (i.e., serializes it).
1352       * @param s the stream
1353 +     * @throws java.io.IOException if an I/O error occurs
1354       * @serialData
1355       * the key (Object) and value (Object)
1356       * for each key-value mapping, followed by a null pair.
# Line 1338 | Line 1368 | public class ConcurrentHashMap<K,V> impl
1368          }
1369          int segmentShift = 32 - sshift;
1370          int segmentMask = ssize - 1;
1371 <        @SuppressWarnings("unchecked") Segment<K,V>[] segments = (Segment<K,V>[])
1371 >        @SuppressWarnings("unchecked")
1372 >        Segment<K,V>[] segments = (Segment<K,V>[])
1373              new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
1374          for (int i = 0; i < segments.length; ++i)
1375              segments[i] = new Segment<K,V>(LOAD_FACTOR);
# Line 1363 | Line 1394 | public class ConcurrentHashMap<K,V> impl
1394      /**
1395       * Reconstitutes the instance from a stream (that is, deserializes it).
1396       * @param s the stream
1397 +     * @throws ClassNotFoundException if the class of a serialized object
1398 +     *         could not be found
1399 +     * @throws java.io.IOException if an I/O error occurs
1400       */
1401      private void readObject(java.io.ObjectInputStream s)
1402          throws java.io.IOException, ClassNotFoundException {
# Line 1378 | Line 1412 | public class ConcurrentHashMap<K,V> impl
1412          long size = 0L;
1413          Node<K,V> p = null;
1414          for (;;) {
1415 <            @SuppressWarnings("unchecked") K k = (K) s.readObject();
1416 <            @SuppressWarnings("unchecked") V v = (V) s.readObject();
1415 >            @SuppressWarnings("unchecked")
1416 >            K k = (K) s.readObject();
1417 >            @SuppressWarnings("unchecked")
1418 >            V v = (V) s.readObject();
1419              if (k != null && v != null) {
1420                  p = new Node<K,V>(spread(k.hashCode()), k, v, p);
1421                  ++size;
# Line 1397 | Line 1433 | public class ConcurrentHashMap<K,V> impl
1433                  int sz = (int)size;
1434                  n = tableSizeFor(sz + (sz >>> 1) + 1);
1435              }
1436 <            @SuppressWarnings({"rawtypes","unchecked"})
1437 <                Node<K,V>[] tab = (Node<K,V>[])new Node[n];
1436 >            @SuppressWarnings("unchecked")
1437 >            Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
1438              int mask = n - 1;
1439              long added = 0L;
1440              while (p != null) {
# Line 1623 | Line 1659 | public class ConcurrentHashMap<K,V> impl
1659                                  Node<K,V> pred = e;
1660                                  if ((e = e.next) == null) {
1661                                      if ((val = mappingFunction.apply(key)) != null) {
1662 +                                        if (pred.next != null)
1663 +                                            throw new IllegalStateException("Recursive update");
1664                                          added = true;
1665                                          pred.next = new Node<K,V>(h, key, val, null);
1666                                      }
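
The checks added in this and the following hunks guard against mapping functions that re-enter the map being updated. A sketch of the kind of misuse they target; detection depends on the keys hashing to the same bin, so this is illustrative rather than a guaranteed reproduction.

    import java.util.concurrent.ConcurrentHashMap;

    public class RecursiveUpdateSketch {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            try {
                // The mapping function mutates the same map it is computing for,
                // which the class contract forbids; the added checks make some of
                // these cases fail fast instead of corrupting the bin.
                map.computeIfAbsent("k1", k -> {
                    map.put("k2", 2);    // re-entrant update from inside the function
                    return 1;
                });
                System.out.println("not detected this time (keys landed in different bins)");
            } catch (IllegalStateException e) {
                System.out.println("detected: " + e.getMessage());
            }
        }
    }
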
# Line 1642 | Line 1680 | public class ConcurrentHashMap<K,V> impl
1680                                  t.putTreeVal(h, key, val);
1681                              }
1682                          }
1683 +                        else if (f instanceof ReservationNode)
1684 +                            throw new IllegalStateException("Recursive update");
1685                      }
1686                  }
1687                  if (binCount != 0) {
# Line 1737 | Line 1777 | public class ConcurrentHashMap<K,V> impl
1777                                  }
1778                              }
1779                          }
1780 +                        else if (f instanceof ReservationNode)
1781 +                            throw new IllegalStateException("Recursive update");
1782                      }
1783                  }
1784                  if (binCount != 0)
# Line 1828 | Line 1870 | public class ConcurrentHashMap<K,V> impl
1870                                  if ((e = e.next) == null) {
1871                                      val = remappingFunction.apply(key, null);
1872                                      if (val != null) {
1873 +                                        if (pred.next != null)
1874 +                                            throw new IllegalStateException("Recursive update");
1875                                          delta = 1;
1876                                          pred.next =
1877                                              new Node<K,V>(h, key, val, null);
# Line 1860 | Line 1904 | public class ConcurrentHashMap<K,V> impl
1904                                      setTabAt(tab, i, untreeify(t.first));
1905                              }
1906                          }
1907 +                        else if (f instanceof ReservationNode)
1908 +                            throw new IllegalStateException("Recursive update");
1909                      }
1910                  }
1911                  if (binCount != 0) {
# Line 1969 | Line 2015 | public class ConcurrentHashMap<K,V> impl
2015                                      setTabAt(tab, i, untreeify(t.first));
2016                              }
2017                          }
2018 +                        else if (f instanceof ReservationNode)
2019 +                            throw new IllegalStateException("Recursive update");
2020                      }
2021                  }
2022                  if (binCount != 0) {
# Line 1987 | Line 2035 | public class ConcurrentHashMap<K,V> impl
2035  
2036      /**
2037       * Legacy method testing if some key maps into the specified value
2038 <     * in this table.  This method is identical in functionality to
2038 >     * in this table.
2039 >     *
2040 >     * @deprecated This method is identical in functionality to
2041       * {@link #containsValue(Object)}, and exists solely to ensure
2042       * full compatibility with class {@link java.util.Hashtable},
2043       * which supported this method prior to introduction of the
# Line 2000 | Line 2050 | public class ConcurrentHashMap<K,V> impl
2050       *         {@code false} otherwise
2051       * @throws NullPointerException if the specified value is null
2052       */
2053 <    @Deprecated public boolean contains(Object value) {
2053 >    @Deprecated
2054 >    public boolean contains(Object value) {
2055          return containsValue(value);
2056      }
2057  
# Line 2049 | Line 2100 | public class ConcurrentHashMap<K,V> impl
2100       * Creates a new {@link Set} backed by a ConcurrentHashMap
2101       * from the given type to {@code Boolean.TRUE}.
2102       *
2103 +     * @param <K> the element type of the returned set
2104       * @return the new set
2105       * @since 1.8
2106       */
# Line 2063 | Line 2115 | public class ConcurrentHashMap<K,V> impl
2115       *
2116       * @param initialCapacity The implementation performs internal
2117       * sizing to accommodate this many elements.
2118 +     * @param <K> the element type of the returned set
2119 +     * @return the new set
2120       * @throws IllegalArgumentException if the initial capacity of
2121       * elements is negative
2068     * @return the new set
2122       * @since 1.8
2123       */
2124      public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
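
A short usage sketch of the newKeySet factories documented above; element values are illustrative.

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    public class NewKeySetDemo {
        public static void main(String[] args) {
            // A concurrent Set view backed by a ConcurrentHashMap mapping to Boolean.TRUE.
            Set<String> seen = ConcurrentHashMap.newKeySet();
            Set<String> sized = ConcurrentHashMap.newKeySet(1024);  // pre-sized variant
            seen.add("alpha");
            sized.addAll(seen);
            System.out.println(seen.contains("alpha") + " " + sized.size());  // true 1
        }
    }
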
# Line 2103 | Line 2156 | public class ConcurrentHashMap<K,V> impl
2156          }
2157  
2158          Node<K,V> find(int h, Object k) {
2159 <            Node<K,V> e; int n;
2160 <            Node<K,V>[] tab = nextTable;
2161 <            if (k != null && tab != null && (n = tab.length) > 0 &&
2162 <                (e = tabAt(tab, (n - 1) & h)) != null) {
2163 <                do {
2159 >            // loop to avoid arbitrarily deep recursion on forwarding nodes
2160 >            outer: for (Node<K,V>[] tab = nextTable;;) {
2161 >                Node<K,V> e; int n;
2162 >                if (k == null || tab == null || (n = tab.length) == 0 ||
2163 >                    (e = tabAt(tab, (n - 1) & h)) == null)
2164 >                    return null;
2165 >                for (;;) {
2166                      int eh; K ek;
2167                      if ((eh = e.hash) == h &&
2168                          ((ek = e.key) == k || (ek != null && k.equals(ek))))
2169                          return e;
2170 <                    if (eh < 0)
2171 <                        return e.find(h, k);
2172 <                } while ((e = e.next) != null);
2170 >                    if (eh < 0) {
2171 >                        if (e instanceof ForwardingNode) {
2172 >                            tab = ((ForwardingNode<K,V>)e).nextTable;
2173 >                            continue outer;
2174 >                        }
2175 >                        else
2176 >                            return e.find(h, k);
2177 >                    }
2178 >                    if ((e = e.next) == null)
2179 >                        return null;
2180 >                }
2181              }
2119            return null;
2182          }
2183      }
2184  
# Line 2136 | Line 2198 | public class ConcurrentHashMap<K,V> impl
2198      /* ---------------- Table Initialization and Resizing -------------- */
2199  
2200      /**
2201 +     * Returns the stamp bits for resizing a table of size n.
2202 +     * Must be negative when shifted left by RESIZE_STAMP_SHIFT.
2203 +     */
2204 +    static final int resizeStamp(int n) {
2205 +        return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
2206 +    }
2207 +
2208 +    /**
2209       * Initializes table, using the size recorded in sizeCtl.
2210       */
2211      private final Node<K,V>[] initTable() {
# Line 2147 | Line 2217 | public class ConcurrentHashMap<K,V> impl
2217                  try {
2218                      if ((tab = table) == null || tab.length == 0) {
2219                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2220 <                        @SuppressWarnings({"rawtypes","unchecked"})
2221 <                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2220 >                        @SuppressWarnings("unchecked")
2221 >                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2222                          table = tab = nt;
2223                          sc = n - (n >>> 2);
2224                      }
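
A worked example of the resizeStamp encoding introduced above, with RESIZE_STAMP_BITS and RESIZE_STAMP_SHIFT restated locally (the real fields are private to ConcurrentHashMap), assuming the default value of 16 shown earlier in this diff.

    public class ResizeStampDemo {
        // Restated from the diff; not accessible outside ConcurrentHashMap.
        static final int RESIZE_STAMP_BITS = 16;
        static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;

        static int resizeStamp(int n) {
            return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
        }

        public static void main(String[] args) {
            for (int n : new int[] {16, 64, 1 << 30}) {
                int rs = resizeStamp(n);
                // Shifted into the high half of sizeCtl the stamp is negative, marking
                // a resize in progress; the low bits count participating threads
                // (+2 for the initiating thread, +1 for each helper).
                System.out.printf("n=%d stamp=0x%x sizeCtl=%d%n",
                                  n, rs, (rs << RESIZE_STAMP_SHIFT) + 2);
            }
        }
    }
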
# Line 2189 | Line 2259 | public class ConcurrentHashMap<K,V> impl
2259              s = sumCount();
2260          }
2261          if (check >= 0) {
2262 <            Node<K,V>[] tab, nt; int sc;
2262 >            Node<K,V>[] tab, nt; int n, sc;
2263              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
2264 <                   tab.length < MAXIMUM_CAPACITY) {
2264 >                   (n = tab.length) < MAXIMUM_CAPACITY) {
2265 >                int rs = resizeStamp(n);
2266                  if (sc < 0) {
2267 <                    if (sc == -1 || transferIndex <= transferOrigin ||
2268 <                        (nt = nextTable) == null)
2267 >                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2268 >                        sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
2269 >                        transferIndex <= 0)
2270                          break;
2271 <                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2271 >                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1))
2272                          transfer(tab, nt);
2273                  }
2274 <                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
2274 >                else if (U.compareAndSwapInt(this, SIZECTL, sc,
2275 >                                             (rs << RESIZE_STAMP_SHIFT) + 2))
2276                      transfer(tab, null);
2277                  s = sumCount();
2278              }
# Line 2211 | Line 2284 | public class ConcurrentHashMap<K,V> impl
2284       */
2285      final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
2286          Node<K,V>[] nextTab; int sc;
2287 <        if ((f instanceof ForwardingNode) &&
2287 >        if (tab != null && (f instanceof ForwardingNode) &&
2288              (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
2289 <            if (nextTab == nextTable && tab == table &&
2290 <                transferIndex > transferOrigin && (sc = sizeCtl) < -1 &&
2291 <                U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2292 <                transfer(tab, nextTab);
2289 >            int rs = resizeStamp(tab.length);
2290 >            while (nextTab == nextTable && table == tab &&
2291 >                   (sc = sizeCtl) < 0) {
2292 >                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2293 >                    sc == rs + MAX_RESIZERS || transferIndex <= 0)
2294 >                    break;
2295 >                if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) {
2296 >                    transfer(tab, nextTab);
2297 >                    break;
2298 >                }
2299 >            }
2300              return nextTab;
2301          }
2302          return table;
# Line 2238 | Line 2318 | public class ConcurrentHashMap<K,V> impl
2318                  if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2319                      try {
2320                          if (table == tab) {
2321 <                            @SuppressWarnings({"rawtypes","unchecked"})
2322 <                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2321 >                            @SuppressWarnings("unchecked")
2322 >                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2323                              table = nt;
2324                              sc = n - (n >>> 2);
2325                          }
# Line 2250 | Line 2330 | public class ConcurrentHashMap<K,V> impl
2330              }
2331              else if (c <= sc || n >= MAXIMUM_CAPACITY)
2332                  break;
2333 <            else if (tab == table &&
2334 <                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
2335 <                transfer(tab, null);
2333 >            else if (tab == table) {
2334 >                int rs = resizeStamp(n);
2335 >                if (U.compareAndSwapInt(this, SIZECTL, sc,
2336 >                                        (rs << RESIZE_STAMP_SHIFT) + 2))
2337 >                    transfer(tab, null);
2338 >            }
2339          }
2340      }
2341  
# Line 2266 | Line 2349 | public class ConcurrentHashMap<K,V> impl
2349              stride = MIN_TRANSFER_STRIDE; // subdivide range
2350          if (nextTab == null) {            // initiating
2351              try {
2352 <                @SuppressWarnings({"rawtypes","unchecked"})
2353 <                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
2352 >                @SuppressWarnings("unchecked")
2353 >                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
2354                  nextTab = nt;
2355              } catch (Throwable ex) {      // try to cope with OOME
2356                  sizeCtl = Integer.MAX_VALUE;
2357                  return;
2358              }
2359              nextTable = nextTab;
2277            transferOrigin = n;
2360              transferIndex = n;
2279            ForwardingNode<K,V> rev = new ForwardingNode<K,V>(tab);
2280            for (int k = n; k > 0;) {    // progressively reveal ready slots
2281                int nextk = (k > stride) ? k - stride : 0;
2282                for (int m = nextk; m < k; ++m)
2283                    nextTab[m] = rev;
2284                for (int m = n + nextk; m < n + k; ++m)
2285                    nextTab[m] = rev;
2286                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2287            }
2361          }
2362          int nextn = nextTab.length;
2363          ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
2364          boolean advance = true;
2365 +        boolean finishing = false; // to ensure sweep before committing nextTab
2366          for (int i = 0, bound = 0;;) {
2367 <            int nextIndex, nextBound, fh; Node<K,V> f;
2367 >            Node<K,V> f; int fh;
2368              while (advance) {
2369 <                if (--i >= bound)
2369 >                int nextIndex, nextBound;
2370 >                if (--i >= bound || finishing)
2371                      advance = false;
2372 <                else if ((nextIndex = transferIndex) <= transferOrigin) {
2372 >                else if ((nextIndex = transferIndex) <= 0) {
2373                      i = -1;
2374                      advance = false;
2375                  }
# Line 2308 | Line 2383 | public class ConcurrentHashMap<K,V> impl
2383                  }
2384              }
2385              if (i < 0 || i >= n || i + n >= nextn) {
2386 <                for (int sc;;) {
2387 <                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2388 <                        if (sc == -1) {
2389 <                            nextTable = null;
2390 <                            table = nextTab;
2391 <                            sizeCtl = (n << 1) - (n >>> 1);
2317 <                        }
2318 <                        return;
2319 <                    }
2386 >                int sc;
2387 >                if (finishing) {
2388 >                    nextTable = null;
2389 >                    table = nextTab;
2390 >                    sizeCtl = (n << 1) - (n >>> 1);
2391 >                    return;
2392                  }
2393 <            }
2394 <            else if ((f = tabAt(tab, i)) == null) {
2395 <                if (casTabAt(tab, i, null, fwd)) {
2396 <                    setTabAt(nextTab, i, null);
2397 <                    setTabAt(nextTab, i + n, null);
2326 <                    advance = true;
2393 >                if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
2394 >                    if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
2395 >                        return;
2396 >                    finishing = advance = true;
2397 >                    i = n; // recheck before commit
2398                  }
2399              }
2400 +            else if ((f = tabAt(tab, i)) == null)
2401 +                advance = casTabAt(tab, i, null, fwd);
2402              else if ((fh = f.hash) == MOVED)
2403                  advance = true; // already processed
2404              else {
# Line 2357 | Line 2430 | public class ConcurrentHashMap<K,V> impl
2430                                  else
2431                                      hn = new Node<K,V>(ph, pk, pv, hn);
2432                              }
2433 +                            setTabAt(nextTab, i, ln);
2434 +                            setTabAt(nextTab, i + n, hn);
2435 +                            setTabAt(tab, i, fwd);
2436 +                            advance = true;
2437                          }
2438                          else if (f instanceof TreeBin) {
2439                              TreeBin<K,V> t = (TreeBin<K,V>)f;
# Line 2388 | Line 2465 | public class ConcurrentHashMap<K,V> impl
2465                                  (hc != 0) ? new TreeBin<K,V>(lo) : t;
2466                              hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
2467                                  (lc != 0) ? new TreeBin<K,V>(hi) : t;
2468 +                            setTabAt(nextTab, i, ln);
2469 +                            setTabAt(nextTab, i + n, hn);
2470 +                            setTabAt(tab, i, fwd);
2471 +                            advance = true;
2472                          }
2392                        else
2393                            ln = hn = null;
2394                        setTabAt(nextTab, i, ln);
2395                        setTabAt(nextTab, i + n, hn);
2396                        setTabAt(tab, i, fwd);
2397                        advance = true;
2473                      }
2474                  }
2475              }
# Line 2515 | Line 2590 | public class ConcurrentHashMap<K,V> impl
2590      private final void treeifyBin(Node<K,V>[] tab, int index) {
2591          Node<K,V> b; int n, sc;
2592          if (tab != null) {
2593 <            if ((n = tab.length) < MIN_TREEIFY_CAPACITY) {
2594 <                if (tab == table && (sc = sizeCtl) >= 0 &&
2595 <                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
2521 <                    transfer(tab, null);
2522 <            }
2523 <            else if ((b = tabAt(tab, index)) != null) {
2593 >            if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
2594 >                tryPresize(n << 1);
2595 >            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
2596                  synchronized (b) {
2597                      if (tabAt(tab, index) == b) {
2598                          TreeNode<K,V> hd = null, tl = null;
# Line 2542 | Line 2614 | public class ConcurrentHashMap<K,V> impl
2614      }
2615  
2616      /**
2617 <     * Returns a list on non-TreeNodes replacing those in given list
2617 >     * Returns a list on non-TreeNodes replacing those in given list.
2618       */
2619      static <K,V> Node<K,V> untreeify(Node<K,V> b) {
2620          Node<K,V> hd = null, tl = null;
# Line 2586 | Line 2658 | public class ConcurrentHashMap<K,V> impl
2658          final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
2659              if (k != null) {
2660                  TreeNode<K,V> p = this;
2661 <                do  {
2661 >                do {
2662                      int ph, dir; K pk; TreeNode<K,V> q;
2663                      TreeNode<K,V> pl = p.left, pr = p.right;
2664                      if ((ph = p.hash) > h)
# Line 2595 | Line 2667 | public class ConcurrentHashMap<K,V> impl
2667                          p = pr;
2668                      else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
2669                          return p;
2670 <                    else if (pl == null && pr == null)
2671 <                        break;
2670 >                    else if (pl == null)
2671 >                        p = pr;
2672 >                    else if (pr == null)
2673 >                        p = pl;
2674                      else if ((kc != null ||
2675                                (kc = comparableClassFor(k)) != null) &&
2676                               (dir = compareComparables(kc, k, pk)) != 0)
2677                          p = (dir < 0) ? pl : pr;
2678 <                    else if (pl == null)
2605 <                        p = pr;
2606 <                    else if (pr == null ||
2607 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2608 <                        p = pl;
2609 <                    else
2678 >                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
2679                          return q;
2680 +                    else
2681 +                        p = pl;
2682                  } while (p != null);
2683              }
2684              return null;
# Line 2634 | Line 2705 | public class ConcurrentHashMap<K,V> impl
2705          static final int READER = 4; // increment value for setting read lock
2706  
2707          /**
2708 +         * Tie-breaking utility for ordering insertions when equal
2709 +         * hashCodes and non-comparable. We don't require a total
2710 +         * order, just a consistent insertion rule to maintain
2711 +         * equivalence across rebalancings. Tie-breaking further than
2712 +         * necessary simplifies testing a bit.
2713 +         */
2714 +        static int tieBreakOrder(Object a, Object b) {
2715 +            int d;
2716 +            if (a == null || b == null ||
2717 +                (d = a.getClass().getName().
2718 +                 compareTo(b.getClass().getName())) == 0)
2719 +                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
2720 +                     -1 : 1);
2721 +            return d;
2722 +        }
2723 +
2724 +        /**
2725           * Creates bin with initial set of nodes headed by b.
2726           */
2727          TreeBin(TreeNode<K,V> b) {
# Line 2649 | Line 2737 | public class ConcurrentHashMap<K,V> impl
2737                      r = x;
2738                  }
2739                  else {
2740 <                    Object key = x.key;
2741 <                    int hash = x.hash;
2740 >                    K k = x.key;
2741 >                    int h = x.hash;
2742                      Class<?> kc = null;
2743                      for (TreeNode<K,V> p = r;;) {
2744                          int dir, ph;
2745 <                        if ((ph = p.hash) > hash)
2745 >                        K pk = p.key;
2746 >                        if ((ph = p.hash) > h)
2747                              dir = -1;
2748 <                        else if (ph < hash)
2748 >                        else if (ph < h)
2749                              dir = 1;
2750 <                        else if ((kc != null ||
2751 <                                  (kc = comparableClassFor(key)) != null))
2752 <                            dir = compareComparables(kc, key, p.key);
2753 <                        else
2665 <                            dir = 0;
2750 >                        else if ((kc == null &&
2751 >                                  (kc = comparableClassFor(k)) == null) ||
2752 >                                 (dir = compareComparables(kc, k, pk)) == 0)
2753 >                            dir = tieBreakOrder(k, pk);
2754                          TreeNode<K,V> xp = p;
2755                          if ((p = (dir <= 0) ? p.left : p.right) == null) {
2756                              x.parent = xp;
# Line 2677 | Line 2765 | public class ConcurrentHashMap<K,V> impl
2765                  }
2766              }
2767              this.root = r;
2768 +            assert checkInvariants(root);
2769          }
2770  
2771          /**
2772 <         * Acquires write lock for tree restructuring
2772 >         * Acquires write lock for tree restructuring.
2773           */
2774          private final void lockRoot() {
2775              if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
# Line 2688 | Line 2777 | public class ConcurrentHashMap<K,V> impl
2777          }
2778  
2779          /**
2780 <         * Releases write lock for tree restructuring
2780 >         * Releases write lock for tree restructuring.
2781           */
2782          private final void unlockRoot() {
2783              lockState = 0;
2784          }
2785  
2786          /**
2787 <         * Possibly blocks awaiting root lock
2787 >         * Possibly blocks awaiting root lock.
2788           */
2789          private final void contendedLock() {
2790              boolean waiting = false;
2791              for (int s;;) {
2792 <                if (((s = lockState) & WRITER) == 0) {
2792 >                if (((s = lockState) & ~WAITER) == 0) {
2793                      if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
2794                          if (waiting)
2795                              waiter = null;
2796                          return;
2797                      }
2798                  }
2799 <                else if ((s | WAITER) == 0) {
2799 >                else if ((s & WAITER) == 0) {
2800                      if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
2801                          waiting = true;
2802                          waiter = Thread.currentThread();
# Line 2720 | Line 2809 | public class ConcurrentHashMap<K,V> impl
2809  
2810          /**
2811           * Returns matching node or null if none. Tries to search
2812 <         * using tree compareisons from root, but continues linear
2812 >         * using tree comparisons from root, but continues linear
2813           * search when lock not available.
2814           */
2815          final Node<K,V> find(int h, Object k) {
2816              if (k != null) {
2817 <                for (Node<K,V> e = first; e != null; e = e.next) {
2817 >                for (Node<K,V> e = first; e != null; ) {
2818                      int s; K ek;
2819                      if (((s = lockState) & (WAITER|WRITER)) != 0) {
2820                          if (e.hash == h &&
2821                              ((ek = e.key) == k || (ek != null && k.equals(ek))))
2822                              return e;
2823 +                        e = e.next;
2824                      }
2825                      else if (U.compareAndSwapInt(this, LOCKSTATE, s,
2826                                                   s + READER)) {
# Line 2757 | Line 2847 | public class ConcurrentHashMap<K,V> impl
2847           */
2848          final TreeNode<K,V> putTreeVal(int h, K k, V v) {
2849              Class<?> kc = null;
2850 +            boolean searched = false;
2851              for (TreeNode<K,V> p = root;;) {
2852 <                int dir, ph; K pk; TreeNode<K,V> q, pr;
2852 >                int dir, ph; K pk;
2853                  if (p == null) {
2854                      first = root = new TreeNode<K,V>(h, k, v, null, null);
2855                      break;
# Line 2772 | Line 2863 | public class ConcurrentHashMap<K,V> impl
2863                  else if ((kc == null &&
2864                            (kc = comparableClassFor(k)) == null) ||
2865                           (dir = compareComparables(kc, k, pk)) == 0) {
2866 <                    if (p.left == null)
2867 <                        dir = 1;
2868 <                    else if ((pr = p.right) == null ||
2869 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2870 <                        dir = -1;
2871 <                    else
2872 <                        return q;
2866 >                    if (!searched) {
2867 >                        TreeNode<K,V> q, ch;
2868 >                        searched = true;
2869 >                        if (((ch = p.left) != null &&
2870 >                             (q = ch.findTreeNode(h, k, kc)) != null) ||
2871 >                            ((ch = p.right) != null &&
2872 >                             (q = ch.findTreeNode(h, k, kc)) != null))
2873 >                            return q;
2874 >                    }
2875 >                    dir = tieBreakOrder(k, pk);
2876                  }
2877 +
2878                  TreeNode<K,V> xp = p;
2879 <                if ((p = (dir < 0) ? p.left : p.right) == null) {
2879 >                if ((p = (dir <= 0) ? p.left : p.right) == null) {
2880                      TreeNode<K,V> x, f = first;
2881                      first = x = new TreeNode<K,V>(h, k, v, f, xp);
2882                      if (f != null)
2883                          f.prev = x;
2884 <                    if (dir < 0)
2884 >                    if (dir <= 0)
2885                          xp.left = x;
2886                      else
2887                          xp.right = x;
# Line 2815 | Line 2910 | public class ConcurrentHashMap<K,V> impl
2910           * that are accessible independently of lock. So instead we
2911           * swap the tree linkages.
2912           *
2913 <         * @return true if now too small so should be untreeified.
2913 >         * @return true if now too small, so should be untreeified
2914           */
2915          final boolean removeTreeNode(TreeNode<K,V> p) {
2916              TreeNode<K,V> next = (TreeNode<K,V>)p.next;
# Line 3009 | Line 3104 | public class ConcurrentHashMap<K,V> impl
3104  
3105          static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root,
3106                                                     TreeNode<K,V> x) {
3107 <            for (TreeNode<K,V> xp, xpl, xpr;;)  {
3107 >            for (TreeNode<K,V> xp, xpl, xpr;;) {
3108                  if (x == null || x == root)
3109                      return root;
3110                  else if ((xp = x.parent) == null) {
# Line 3124 | Line 3219 | public class ConcurrentHashMap<K,V> impl
3219              return true;
3220          }
3221  
3222 <        private static final sun.misc.Unsafe U;
3222 >        private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe();
3223          private static final long LOCKSTATE;
3224          static {
3225              try {
3131                U = sun.misc.Unsafe.getUnsafe();
3132                Class<?> k = TreeBin.class;
3226                  LOCKSTATE = U.objectFieldOffset
3227 <                    (k.getDeclaredField("lockState"));
3228 <            } catch (Exception e) {
3227 >                    (TreeBin.class.getDeclaredField("lockState"));
3228 >            } catch (ReflectiveOperationException e) {
3229                  throw new Error(e);
3230              }
3231          }
# Line 3141 | Line 3234 | public class ConcurrentHashMap<K,V> impl
3234      /* ----------------Table Traversal -------------- */
3235  
3236      /**
3237 +     * Records the table, its length, and current traversal index for a
3238 +     * traverser that must process a region of a forwarded table before
3239 +     * proceeding with current table.
3240 +     */
3241 +    static final class TableStack<K,V> {
3242 +        int length;
3243 +        int index;
3244 +        Node<K,V>[] tab;
3245 +        TableStack<K,V> next;
3246 +    }
3247 +
3248 +    /**
3249       * Encapsulates traversal for methods such as containsValue; also
3250       * serves as a base class for other iterators and spliterators.
3251       *
# Line 3164 | Line 3269 | public class ConcurrentHashMap<K,V> impl
3269      static class Traverser<K,V> {
3270          Node<K,V>[] tab;        // current table; updated if resized
3271          Node<K,V> next;         // the next entry to use
3272 +        TableStack<K,V> stack, spare; // to save/restore on ForwardingNodes
3273          int index;              // index of bin to use next
3274          int baseIndex;          // current index of initial table
3275          int baseLimit;          // index bound for initial table
# Line 3185 | Line 3291 | public class ConcurrentHashMap<K,V> impl
3291              if ((e = next) != null)
3292                  e = e.next;
3293              for (;;) {
3294 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
3294 >                Node<K,V>[] t; int i, n;  // must use locals in checks
3295                  if (e != null)
3296                      return next = e;
3297                  if (baseIndex >= baseLimit || (t = tab) == null ||
3298                      (n = t.length) <= (i = index) || i < 0)
3299                      return next = null;
3300 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
3300 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
3301                      if (e instanceof ForwardingNode) {
3302                          tab = ((ForwardingNode<K,V>)e).nextTable;
3303                          e = null;
3304 +                        pushState(t, i, n);
3305                          continue;
3306                      }
3307                      else if (e instanceof TreeBin)
# Line 3202 | Line 3309 | public class ConcurrentHashMap<K,V> impl
3309                      else
3310                          e = null;
3311                  }
3312 <                if ((index += baseSize) >= n)
3313 <                    index = ++baseIndex;    // visit upper slots if present
3312 >                if (stack != null)
3313 >                    recoverState(n);
3314 >                else if ((index = i + baseSize) >= n)
3315 >                    index = ++baseIndex; // visit upper slots if present
3316 >            }
3317 >        }
3318 >
3319 >        /**
3320 >         * Saves traversal state upon encountering a forwarding node.
3321 >         */
3322 >        private void pushState(Node<K,V>[] t, int i, int n) {
3323 >            TableStack<K,V> s = spare;  // reuse if possible
3324 >            if (s != null)
3325 >                spare = s.next;
3326 >            else
3327 >                s = new TableStack<K,V>();
3328 >            s.tab = t;
3329 >            s.length = n;
3330 >            s.index = i;
3331 >            s.next = stack;
3332 >            stack = s;
3333 >        }
3334 >
3335 >        /**
3336 >         * Possibly pops traversal state.
3337 >         *
3338 >         * @param n length of current table
3339 >         */
3340 >        private void recoverState(int n) {
3341 >            TableStack<K,V> s; int len;
3342 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
3343 >                n = len;
3344 >                index = s.index;
3345 >                tab = s.tab;
3346 >                s.tab = null;
3347 >                TableStack<K,V> next = s.next;
3348 >                s.next = spare; // save for reuse
3349 >                stack = next;
3350 >                spare = s;
3351              }
3352 +            if (s == null && (index += baseSize) >= n)
3353 +                index = ++baseIndex;
3354          }
3355      }
3356  
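
Since this Traverser backs the map's iterators and spliterators, iteration is weakly consistent: it never throws ConcurrentModificationException, traverses the entries that existed when the iterator was created, and may or may not reflect concurrent insertions, even across a resize. A small usage sketch (illustrative; the class and key names are not from this file):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class WeaklyConsistentIterationDemo {
        public static void main(String[] args) throws InterruptedException {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);

            // Writer thread mutating (and possibly resizing) the map during iteration.
            Thread writer = new Thread(() -> {
                for (int i = 0; i < 10_000; i++)
                    map.put("k" + i, i);
            });
            writer.start();

            // No ConcurrentModificationException; "a" and "b" are guaranteed to be
            // seen, while concurrently added keys may or may not be.
            for (Map.Entry<String, Integer> e : map.entrySet())
                System.out.println(e.getKey() + "=" + e.getValue());

            writer.join();
        }
    }
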
3357      /**
3358       * Base of key, value, and entry Iterators. Adds fields to
3359 <     * Traverser to support iterator.remove
3359 >     * Traverser to support iterator.remove.
3360       */
3361      static class BaseIterator<K,V> extends Traverser<K,V> {
3362          final ConcurrentHashMap<K,V> map;
# Line 3498 | Line 3644 | public class ConcurrentHashMap<K,V> impl
3644       * for an element, or null if there is no transformation (in
3645       * which case the action is not applied)
3646       * @param action the action
3647 +     * @param <U> the return type of the transformer
3648       * @since 1.8
3649       */
3650      public <U> void forEach(long parallelismThreshold,
# Line 3521 | Line 3668 | public class ConcurrentHashMap<K,V> impl
3668       * needed for this operation to be executed in parallel
3669       * @param searchFunction a function returning a non-null
3670       * result on success, else null
3671 +     * @param <U> the return type of the search function
3672       * @return a non-null result from applying the given search
3673       * function on each (key, value), or null if none
3674       * @since 1.8
# Line 3544 | Line 3692 | public class ConcurrentHashMap<K,V> impl
3692       * for an element, or null if there is no transformation (in
3693       * which case it is not combined)
3694       * @param reducer a commutative associative combining function
3695 +     * @param <U> the return type of the transformer
3696       * @return the result of accumulating the given transformation
3697       * of all (key, value) pairs
3698       * @since 1.8
# Line 3573 | Line 3722 | public class ConcurrentHashMap<K,V> impl
3722       * of all (key, value) pairs
3723       * @since 1.8
3724       */
3725 <    public double reduceToDoubleIn(long parallelismThreshold,
3726 <                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3727 <                                   double basis,
3728 <                                   DoubleBinaryOperator reducer) {
3725 >    public double reduceToDouble(long parallelismThreshold,
3726 >                                 ToDoubleBiFunction<? super K, ? super V> transformer,
3727 >                                 double basis,
3728 >                                 DoubleBinaryOperator reducer) {
3729          if (transformer == null || reducer == null)
3730              throw new NullPointerException();
3731          return new MapReduceMappingsToDoubleTask<K,V>
# Line 3662 | Line 3811 | public class ConcurrentHashMap<K,V> impl
3811       * for an element, or null if there is no transformation (in
3812       * which case the action is not applied)
3813       * @param action the action
3814 +     * @param <U> the return type of the transformer
3815       * @since 1.8
3816       */
3817      public <U> void forEachKey(long parallelismThreshold,
# Line 3685 | Line 3835 | public class ConcurrentHashMap<K,V> impl
3835       * needed for this operation to be executed in parallel
3836       * @param searchFunction a function returning a non-null
3837       * result on success, else null
3838 +     * @param <U> the return type of the search function
3839       * @return a non-null result from applying the given search
3840       * function on each key, or null if none
3841       * @since 1.8
# Line 3727 | Line 3878 | public class ConcurrentHashMap<K,V> impl
3878       * for an element, or null if there is no transformation (in
3879       * which case it is not combined)
3880       * @param reducer a commutative associative combining function
3881 +     * @param <U> the return type of the transformer
3882       * @return the result of accumulating the given transformation
3883       * of all keys
3884       * @since 1.8
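
The ...Keys variants apply the same scheme to keys alone; the Values and Entries variants further down mirror it. Another short sketch, illustrative only (names and contents are arbitrary):

    import java.util.concurrent.ConcurrentHashMap;

    public class KeyBulkOpsDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> stock = new ConcurrentHashMap<>();
            stock.put("apple", 3);
            stock.put("banana", 5);
            stock.put("cherry", 2);

            // searchKeys: first key (in traversal order) yielding a non-null result.
            String startsWithA = stock.searchKeys(1L, k -> k.startsWith("a") ? k : null);
            System.out.println(startsWithA);

            // reduceKeys with a transformer: total length of all keys.
            Integer totalLength = stock.reduceKeys(1L, String::length, Integer::sum);
            System.out.println(totalLength);

            // reduceKeysToLong: primitive specialization with a basis of 0.
            long totalLengthAsLong = stock.reduceKeysToLong(1L, k -> k.length(), 0L, Long::sum);
            System.out.println(totalLengthAsLong);
        }
    }
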
# Line 3846 | Line 3998 | public class ConcurrentHashMap<K,V> impl
3998       * for an element, or null if there is no transformation (in
3999       * which case the action is not applied)
4000       * @param action the action
4001 +     * @param <U> the return type of the transformer
4002       * @since 1.8
4003       */
4004      public <U> void forEachValue(long parallelismThreshold,
# Line 3869 | Line 4022 | public class ConcurrentHashMap<K,V> impl
4022       * needed for this operation to be executed in parallel
4023       * @param searchFunction a function returning a non-null
4024       * result on success, else null
4025 +     * @param <U> the return type of the search function
4026       * @return a non-null result from applying the given search
4027       * function on each value, or null if none
4028       * @since 1.8
# Line 3910 | Line 4064 | public class ConcurrentHashMap<K,V> impl
4064       * for an element, or null if there is no transformation (in
4065       * which case it is not combined)
4066       * @param reducer a commutative associative combining function
4067 +     * @param <U> the return type of the transformer
4068       * @return the result of accumulating the given transformation
4069       * of all values
4070       * @since 1.8
# Line 4027 | Line 4182 | public class ConcurrentHashMap<K,V> impl
4182       * for an element, or null if there is no transformation (in
4183       * which case the action is not applied)
4184       * @param action the action
4185 +     * @param <U> the return type of the transformer
4186       * @since 1.8
4187       */
4188      public <U> void forEachEntry(long parallelismThreshold,
# Line 4050 | Line 4206 | public class ConcurrentHashMap<K,V> impl
4206       * needed for this operation to be executed in parallel
4207       * @param searchFunction a function returning a non-null
4208       * result on success, else null
4209 +     * @param <U> the return type of the search function
4210       * @return a non-null result from applying the given search
4211       * function on each entry, or null if none
4212       * @since 1.8
# Line 4091 | Line 4248 | public class ConcurrentHashMap<K,V> impl
4248       * for an element, or null if there is no transformation (in
4249       * which case it is not combined)
4250       * @param reducer a commutative associative combining function
4251 +     * @param <U> the return type of the transformer
4252       * @return the result of accumulating the given transformation
4253       * of all entries
4254       * @since 1.8
# Line 4213 | Line 4371 | public class ConcurrentHashMap<K,V> impl
4371          // implementations below rely on concrete classes supplying these
4372          // abstract methods
4373          /**
4374 <         * Returns a "weakly consistent" iterator that will never
4375 <         * throw {@link ConcurrentModificationException}, and
4376 <         * guarantees to traverse elements as they existed upon
4377 <         * construction of the iterator, and may (but is not
4378 <         * guaranteed to) reflect any modifications subsequent to
4379 <         * construction.
4374 >         * Returns an iterator over the elements in this collection.
4375 >         *
4376 >         * <p>The returned iterator is
4377 >         * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
4378 >         *
4379 >         * @return an iterator over the elements in this collection
4380           */
4381          public abstract Iterator<E> iterator();
4382          public abstract boolean contains(Object o);
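
The removeAll and retainAll hunks just below add an explicit null check before iterating. Because the key, value, and entry collections are live views backed by the map, bulk removal through a view removes the corresponding mappings. A small usage sketch (illustrative; the class name is not from this file):

    import java.util.Arrays;
    import java.util.concurrent.ConcurrentHashMap;

    public class ViewRemovalDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);
            map.put("c", 3);

            // Removing keys through the view removes the mappings themselves.
            map.keySet().removeAll(Arrays.asList("a", "b"));
            System.out.println(map);                      // {c=3}

            try {
                map.values().retainAll(null);             // now rejected up front
            } catch (NullPointerException expected) {
                System.out.println("null collection rejected");
            }
        }
    }
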
# Line 4316 | Line 4474 | public class ConcurrentHashMap<K,V> impl
4474          }
4475  
4476          public final boolean removeAll(Collection<?> c) {
4477 +            if (c == null) throw new NullPointerException();
4478              boolean modified = false;
4479              for (Iterator<E> it = iterator(); it.hasNext();) {
4480                  if (c.contains(it.next())) {
# Line 4327 | Line 4486 | public class ConcurrentHashMap<K,V> impl
4486          }
4487  
4488          public final boolean retainAll(Collection<?> c) {
4489 +            if (c == null) throw new NullPointerException();
4490              boolean modified = false;
4491              for (Iterator<E> it = iterator(); it.hasNext();) {
4492                  if (!c.contains(it.next())) {
# Line 4621 | Line 4781 | public class ConcurrentHashMap<K,V> impl
4781       * Base class for bulk tasks. Repeats some fields and code from
4782       * class Traverser, because we need to subclass CountedCompleter.
4783       */
4784 +    @SuppressWarnings("serial")
4785      abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
4786          Node<K,V>[] tab;        // same as Traverser
4787          Node<K,V> next;
4788 +        TableStack<K,V> stack, spare;
4789          int index;
4790          int baseIndex;
4791          int baseLimit;
# Line 4652 | Line 4814 | public class ConcurrentHashMap<K,V> impl
4814              if ((e = next) != null)
4815                  e = e.next;
4816              for (;;) {
4817 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
4817 >                Node<K,V>[] t; int i, n;
4818                  if (e != null)
4819                      return next = e;
4820                  if (baseIndex >= baseLimit || (t = tab) == null ||
4821                      (n = t.length) <= (i = index) || i < 0)
4822                      return next = null;
4823 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4823 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
4824                      if (e instanceof ForwardingNode) {
4825                          tab = ((ForwardingNode<K,V>)e).nextTable;
4826                          e = null;
4827 +                        pushState(t, i, n);
4828                          continue;
4829                      }
4830                      else if (e instanceof TreeBin)
# Line 4669 | Line 4832 | public class ConcurrentHashMap<K,V> impl
4832                      else
4833                          e = null;
4834                  }
4835 <                if ((index += baseSize) >= n)
4836 <                    index = ++baseIndex;    // visit upper slots if present
4835 >                if (stack != null)
4836 >                    recoverState(n);
4837 >                else if ((index = i + baseSize) >= n)
4838 >                    index = ++baseIndex;
4839              }
4840          }
4841 +
4842 +        private void pushState(Node<K,V>[] t, int i, int n) {
4843 +            TableStack<K,V> s = spare;
4844 +            if (s != null)
4845 +                spare = s.next;
4846 +            else
4847 +                s = new TableStack<K,V>();
4848 +            s.tab = t;
4849 +            s.length = n;
4850 +            s.index = i;
4851 +            s.next = stack;
4852 +            stack = s;
4853 +        }
4854 +
4855 +        private void recoverState(int n) {
4856 +            TableStack<K,V> s; int len;
4857 +            while ((s = stack) != null && (index += (len = s.length)) >= n) {
4858 +                n = len;
4859 +                index = s.index;
4860 +                tab = s.tab;
4861 +                s.tab = null;
4862 +                TableStack<K,V> next = s.next;
4863 +                s.next = spare; // save for reuse
4864 +                stack = next;
4865 +                spare = s;
4866 +            }
4867 +            if (s == null && (index += baseSize) >= n)
4868 +                index = ++baseIndex;
4869 +        }
4870      }
4871  
4872      /*
# Line 5131 | Line 5325 | public class ConcurrentHashMap<K,V> impl
5325                  result = r;
5326                  CountedCompleter<?> c;
5327                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5328 <                    @SuppressWarnings("unchecked") ReduceKeysTask<K,V>
5328 >                    @SuppressWarnings("unchecked")
5329 >                    ReduceKeysTask<K,V>
5330                          t = (ReduceKeysTask<K,V>)c,
5331                          s = t.rights;
5332                      while (s != null) {
# Line 5178 | Line 5373 | public class ConcurrentHashMap<K,V> impl
5373                  result = r;
5374                  CountedCompleter<?> c;
5375                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5376 <                    @SuppressWarnings("unchecked") ReduceValuesTask<K,V>
5376 >                    @SuppressWarnings("unchecked")
5377 >                    ReduceValuesTask<K,V>
5378                          t = (ReduceValuesTask<K,V>)c,
5379                          s = t.rights;
5380                      while (s != null) {
# Line 5223 | Line 5419 | public class ConcurrentHashMap<K,V> impl
5419                  result = r;
5420                  CountedCompleter<?> c;
5421                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5422 <                    @SuppressWarnings("unchecked") ReduceEntriesTask<K,V>
5422 >                    @SuppressWarnings("unchecked")
5423 >                    ReduceEntriesTask<K,V>
5424                          t = (ReduceEntriesTask<K,V>)c,
5425                          s = t.rights;
5426                      while (s != null) {
# Line 5276 | Line 5473 | public class ConcurrentHashMap<K,V> impl
5473                  result = r;
5474                  CountedCompleter<?> c;
5475                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5476 <                    @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U>
5476 >                    @SuppressWarnings("unchecked")
5477 >                    MapReduceKeysTask<K,V,U>
5478                          t = (MapReduceKeysTask<K,V,U>)c,
5479                          s = t.rights;
5480                      while (s != null) {
# Line 5329 | Line 5527 | public class ConcurrentHashMap<K,V> impl
5527                  result = r;
5528                  CountedCompleter<?> c;
5529                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5530 <                    @SuppressWarnings("unchecked") MapReduceValuesTask<K,V,U>
5530 >                    @SuppressWarnings("unchecked")
5531 >                    MapReduceValuesTask<K,V,U>
5532                          t = (MapReduceValuesTask<K,V,U>)c,
5533                          s = t.rights;
5534                      while (s != null) {
# Line 5382 | Line 5581 | public class ConcurrentHashMap<K,V> impl
5581                  result = r;
5582                  CountedCompleter<?> c;
5583                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5584 <                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
5584 >                    @SuppressWarnings("unchecked")
5585 >                    MapReduceEntriesTask<K,V,U>
5586                          t = (MapReduceEntriesTask<K,V,U>)c,
5587                          s = t.rights;
5588                      while (s != null) {
# Line 5435 | Line 5635 | public class ConcurrentHashMap<K,V> impl
5635                  result = r;
5636                  CountedCompleter<?> c;
5637                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5638 <                    @SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
5638 >                    @SuppressWarnings("unchecked")
5639 >                    MapReduceMappingsTask<K,V,U>
5640                          t = (MapReduceMappingsTask<K,V,U>)c,
5641                          s = t.rights;
5642                      while (s != null) {
# Line 5487 | Line 5688 | public class ConcurrentHashMap<K,V> impl
5688                  result = r;
5689                  CountedCompleter<?> c;
5690                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5691 <                    @SuppressWarnings("unchecked") MapReduceKeysToDoubleTask<K,V>
5691 >                    @SuppressWarnings("unchecked")
5692 >                    MapReduceKeysToDoubleTask<K,V>
5693                          t = (MapReduceKeysToDoubleTask<K,V>)c,
5694                          s = t.rights;
5695                      while (s != null) {
# Line 5536 | Line 5738 | public class ConcurrentHashMap<K,V> impl
5738                  result = r;
5739                  CountedCompleter<?> c;
5740                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5741 <                    @SuppressWarnings("unchecked") MapReduceValuesToDoubleTask<K,V>
5741 >                    @SuppressWarnings("unchecked")
5742 >                    MapReduceValuesToDoubleTask<K,V>
5743                          t = (MapReduceValuesToDoubleTask<K,V>)c,
5744                          s = t.rights;
5745                      while (s != null) {
# Line 5585 | Line 5788 | public class ConcurrentHashMap<K,V> impl
5788                  result = r;
5789                  CountedCompleter<?> c;
5790                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5791 <                    @SuppressWarnings("unchecked") MapReduceEntriesToDoubleTask<K,V>
5791 >                    @SuppressWarnings("unchecked")
5792 >                    MapReduceEntriesToDoubleTask<K,V>
5793                          t = (MapReduceEntriesToDoubleTask<K,V>)c,
5794                          s = t.rights;
5795                      while (s != null) {
# Line 5634 | Line 5838 | public class ConcurrentHashMap<K,V> impl
5838                  result = r;
5839                  CountedCompleter<?> c;
5840                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5841 <                    @SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
5841 >                    @SuppressWarnings("unchecked")
5842 >                    MapReduceMappingsToDoubleTask<K,V>
5843                          t = (MapReduceMappingsToDoubleTask<K,V>)c,
5844                          s = t.rights;
5845                      while (s != null) {
# Line 5683 | Line 5888 | public class ConcurrentHashMap<K,V> impl
5888                  result = r;
5889                  CountedCompleter<?> c;
5890                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5891 <                    @SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
5891 >                    @SuppressWarnings("unchecked")
5892 >                    MapReduceKeysToLongTask<K,V>
5893                          t = (MapReduceKeysToLongTask<K,V>)c,
5894                          s = t.rights;
5895                      while (s != null) {
# Line 5732 | Line 5938 | public class ConcurrentHashMap<K,V> impl
5938                  result = r;
5939                  CountedCompleter<?> c;
5940                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5941 <                    @SuppressWarnings("unchecked") MapReduceValuesToLongTask<K,V>
5941 >                    @SuppressWarnings("unchecked")
5942 >                    MapReduceValuesToLongTask<K,V>
5943                          t = (MapReduceValuesToLongTask<K,V>)c,
5944                          s = t.rights;
5945                      while (s != null) {
# Line 5781 | Line 5988 | public class ConcurrentHashMap<K,V> impl
5988                  result = r;
5989                  CountedCompleter<?> c;
5990                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5991 <                    @SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
5991 >                    @SuppressWarnings("unchecked")
5992 >                    MapReduceEntriesToLongTask<K,V>
5993                          t = (MapReduceEntriesToLongTask<K,V>)c,
5994                          s = t.rights;
5995                      while (s != null) {
# Line 5830 | Line 6038 | public class ConcurrentHashMap<K,V> impl
6038                  result = r;
6039                  CountedCompleter<?> c;
6040                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6041 <                    @SuppressWarnings("unchecked") MapReduceMappingsToLongTask<K,V>
6041 >                    @SuppressWarnings("unchecked")
6042 >                    MapReduceMappingsToLongTask<K,V>
6043                          t = (MapReduceMappingsToLongTask<K,V>)c,
6044                          s = t.rights;
6045                      while (s != null) {
# Line 5879 | Line 6088 | public class ConcurrentHashMap<K,V> impl
6088                  result = r;
6089                  CountedCompleter<?> c;
6090                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6091 <                    @SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
6091 >                    @SuppressWarnings("unchecked")
6092 >                    MapReduceKeysToIntTask<K,V>
6093                          t = (MapReduceKeysToIntTask<K,V>)c,
6094                          s = t.rights;
6095                      while (s != null) {
# Line 5928 | Line 6138 | public class ConcurrentHashMap<K,V> impl
6138                  result = r;
6139                  CountedCompleter<?> c;
6140                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6141 <                    @SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
6141 >                    @SuppressWarnings("unchecked")
6142 >                    MapReduceValuesToIntTask<K,V>
6143                          t = (MapReduceValuesToIntTask<K,V>)c,
6144                          s = t.rights;
6145                      while (s != null) {
# Line 5977 | Line 6188 | public class ConcurrentHashMap<K,V> impl
6188                  result = r;
6189                  CountedCompleter<?> c;
6190                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6191 <                    @SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
6191 >                    @SuppressWarnings("unchecked")
6192 >                    MapReduceEntriesToIntTask<K,V>
6193                          t = (MapReduceEntriesToIntTask<K,V>)c,
6194                          s = t.rights;
6195                      while (s != null) {
# Line 6026 | Line 6238 | public class ConcurrentHashMap<K,V> impl
6238                  result = r;
6239                  CountedCompleter<?> c;
6240                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6241 <                    @SuppressWarnings("unchecked") MapReduceMappingsToIntTask<K,V>
6241 >                    @SuppressWarnings("unchecked")
6242 >                    MapReduceMappingsToIntTask<K,V>
6243                          t = (MapReduceMappingsToIntTask<K,V>)c,
6244                          s = t.rights;
6245                      while (s != null) {
# Line 6039 | Line 6252 | public class ConcurrentHashMap<K,V> impl
6252      }
6253  
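
Every MapReduce*Task hunk above adjusts the same code shape: split by forking right-hand subtasks while the batch is large, compute the local portion, then let the last-completing sibling fold results upward through a firstComplete()/nextComplete() loop. A heavily simplified, illustrative sketch of that shape (SumTask, its field names, and the 1024 cutoff are not from this file):

    import java.util.Arrays;
    import java.util.concurrent.CountedCompleter;

    public class SumTask extends CountedCompleter<Long> {
        final long[] a; final int lo, hi;
        long result;
        SumTask rights, nextRight;                   // linked list of forked right subtasks

        SumTask(CountedCompleter<?> parent, SumTask nextRight, long[] a, int lo, int hi) {
            super(parent);
            this.nextRight = nextRight; this.a = a; this.lo = lo; this.hi = hi;
        }

        public void compute() {
            int l = lo, h = hi;
            while (h - l > 1024) {                   // arbitrary sequential cutoff
                int mid = (l + h) >>> 1;
                addToPendingCount(1);
                (rights = new SumTask(this, rights, a, mid, h)).fork();
                h = mid;
            }
            long sum = 0;
            for (int i = l; i < h; ++i)
                sum += a[i];
            result = sum;
            // The last sibling to finish merges results up the completer tree.
            for (CountedCompleter<?> c = firstComplete(); c != null; c = c.nextComplete()) {
                SumTask t = (SumTask) c;
                for (SumTask s = t.rights; s != null; s = t.rights = s.nextRight)
                    t.result += s.result;
            }
        }

        public Long getRawResult() { return result; }

        public static void main(String[] args) {
            long[] data = new long[1 << 20];
            Arrays.fill(data, 1L);
            System.out.println(new SumTask(null, null, data, 0, data.length).invoke()); // 1048576
        }
    }
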
6254      // Unsafe mechanics
6255 <    private static final sun.misc.Unsafe U;
6255 >    private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe();
6256      private static final long SIZECTL;
6257      private static final long TRANSFERINDEX;
6045    private static final long TRANSFERORIGIN;
6258      private static final long BASECOUNT;
6259      private static final long CELLSBUSY;
6260      private static final long CELLVALUE;
6261 <    private static final long ABASE;
6261 >    private static final int ABASE;
6262      private static final int ASHIFT;
6263  
6264      static {
6265          try {
6054            U = sun.misc.Unsafe.getUnsafe();
6055            Class<?> k = ConcurrentHashMap.class;
6266              SIZECTL = U.objectFieldOffset
6267 <                (k.getDeclaredField("sizeCtl"));
6267 >                (ConcurrentHashMap.class.getDeclaredField("sizeCtl"));
6268              TRANSFERINDEX = U.objectFieldOffset
6269 <                (k.getDeclaredField("transferIndex"));
6060 <            TRANSFERORIGIN = U.objectFieldOffset
6061 <                (k.getDeclaredField("transferOrigin"));
6269 >                (ConcurrentHashMap.class.getDeclaredField("transferIndex"));
6270              BASECOUNT = U.objectFieldOffset
6271 <                (k.getDeclaredField("baseCount"));
6271 >                (ConcurrentHashMap.class.getDeclaredField("baseCount"));
6272              CELLSBUSY = U.objectFieldOffset
6273 <                (k.getDeclaredField("cellsBusy"));
6274 <            Class<?> ck = CounterCell.class;
6273 >                (ConcurrentHashMap.class.getDeclaredField("cellsBusy"));
6274 >
6275              CELLVALUE = U.objectFieldOffset
6276 <                (ck.getDeclaredField("value"));
6277 <            Class<?> ak = Node[].class;
6278 <            ABASE = U.arrayBaseOffset(ak);
6279 <            int scale = U.arrayIndexScale(ak);
6276 >                (CounterCell.class.getDeclaredField("value"));
6277 >
6278 >            ABASE = U.arrayBaseOffset(Node[].class);
6279 >            int scale = U.arrayIndexScale(Node[].class);
6280              if ((scale & (scale - 1)) != 0)
6281 <                throw new Error("data type scale not a power of two");
6281 >                throw new Error("array index scale not a power of two");
6282              ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6283 <        } catch (Exception e) {
6283 >        } catch (ReflectiveOperationException e) {
6284              throw new Error(e);
6285          }
6286      }
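
The reworked static initializer above computes ABASE and ASHIFT so that element i of a Node[] lives at byte offset ABASE + (i << ASHIFT), i.e. ABASE + i * indexScale, which the volatile table accessors defined earlier in the file rely on. A standalone sketch of that arithmetic (illustrative; it obtains Unsafe through the theUnsafe field because, unlike this class, application code cannot call Unsafe.getUnsafe(), and recent JDKs additionally require the jdk.unsupported module):

    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    public class ArrayOffsetDemo {
        public static void main(String[] args) throws Exception {
            Field f = Unsafe.class.getDeclaredField("theUnsafe");
            f.setAccessible(true);
            Unsafe u = (Unsafe) f.get(null);

            int abase = u.arrayBaseOffset(Object[].class);       // offset of element 0
            int scale = u.arrayIndexScale(Object[].class);       // bytes per element
            int ashift = 31 - Integer.numberOfLeadingZeros(scale);

            Object[] a = { "x", "y", "z" };
            // Volatile read of a[2]: same shape as the map's table element access.
            Object v = u.getObjectVolatile(a, ((long) 2 << ashift) + abase);
            System.out.println(v);                               // z
        }
    }
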

Diff Legend

Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)