root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java

Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.228 by jsr166, Tue Jun 18 18:39:14 2013 UTC vs.
Revision 1.285 by jsr166, Sun Sep 20 17:03:22 2015 UTC

# Line 10 | Line 10 | import java.io.ObjectStreamField;
10   import java.io.Serializable;
11   import java.lang.reflect.ParameterizedType;
12   import java.lang.reflect.Type;
13 + import java.util.AbstractMap;
14   import java.util.Arrays;
15   import java.util.Collection;
15 import java.util.Comparator;
16 import java.util.ConcurrentModificationException;
16   import java.util.Enumeration;
17   import java.util.HashMap;
18   import java.util.Hashtable;
# Line 22 | Line 21 | import java.util.Map;
21   import java.util.NoSuchElementException;
22   import java.util.Set;
23   import java.util.Spliterator;
25 import java.util.concurrent.ConcurrentMap;
26 import java.util.concurrent.ForkJoinPool;
24   import java.util.concurrent.atomic.AtomicReference;
25   import java.util.concurrent.locks.LockSupport;
26   import java.util.concurrent.locks.ReentrantLock;
27   import java.util.function.BiConsumer;
28   import java.util.function.BiFunction;
32 import java.util.function.BinaryOperator;
29   import java.util.function.Consumer;
30   import java.util.function.DoubleBinaryOperator;
31   import java.util.function.Function;
32   import java.util.function.IntBinaryOperator;
33   import java.util.function.LongBinaryOperator;
34 + import java.util.function.Predicate;
35   import java.util.function.ToDoubleBiFunction;
36   import java.util.function.ToDoubleFunction;
37   import java.util.function.ToIntBiFunction;
# Line 64 | Line 61 | import java.util.stream.Stream;
61   * that key reporting the updated value.)  For aggregate operations
62   * such as {@code putAll} and {@code clear}, concurrent retrievals may
63   * reflect insertion or removal of only some entries.  Similarly,
64 < * Iterators and Enumerations return elements reflecting the state of
65 < * the hash table at some point at or since the creation of the
64 > * Iterators, Spliterators and Enumerations return elements reflecting the
65 > * state of the hash table at some point at or since the creation of the
66   * iterator/enumeration.  They do <em>not</em> throw {@link
67 < * ConcurrentModificationException}.  However, iterators are designed
68 < * to be used by only one thread at a time.  Bear in mind that the
69 < * results of aggregate status methods including {@code size}, {@code
70 < * isEmpty}, and {@code containsValue} are typically useful only when
71 < * a map is not undergoing concurrent updates in other threads.
67 > * java.util.ConcurrentModificationException ConcurrentModificationException}.
68 > * However, iterators are designed to be used by only one thread at a time.
69 > * Bear in mind that the results of aggregate status methods including
70 > * {@code size}, {@code isEmpty}, and {@code containsValue} are typically
71 > * useful only when a map is not undergoing concurrent updates in other threads.
72   * Otherwise the results of these methods reflect transient states
73   * that may be adequate for monitoring or estimation purposes, but not
74   * for program control.
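
    A minimal sketch of the point made in the javadoc above about aggregate
    status methods (the class name and key below are illustrative, not part of
    the source): size() is fine as an estimate for monitoring, while control
    decisions should use atomic per-key operations.

        import java.util.concurrent.ConcurrentHashMap;

        public class StatusSketch {
            public static void main(String[] args) {
                ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
                // Adequate for monitoring/estimation: a snapshot that may already be stale.
                System.out.println("approximate size: " + map.size());
                // For program control, rely on an atomic per-key operation instead:
                map.putIfAbsent("config", 42); // single atomic check-then-act
            }
        }
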
# Line 104 | Line 101 | import java.util.stream.Stream;
101   * mapped values are (perhaps transiently) not used or all take the
102   * same mapping value.
103   *
104 < * <p>A ConcurrentHashMap can be used as scalable frequency map (a
104 > * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
105   * form of histogram or multiset) by using {@link
106   * java.util.concurrent.atomic.LongAdder} values and initializing via
107   * {@link #computeIfAbsent computeIfAbsent}. For example, to add a count
108   * to a {@code ConcurrentHashMap<String,LongAdder> freqs}, you can use
109 < * {@code freqs.computeIfAbsent(k -> new LongAdder()).increment();}
109 > * {@code freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
110   *
111   * <p>This class and its views and iterators implement all of the
112   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
# Line 124 | Line 121 | import java.util.stream.Stream;
121   * being concurrently updated by other threads; for example, when
122   * computing a snapshot summary of the values in a shared registry.
123   * There are three kinds of operation, each with four forms, accepting
124 < * functions with Keys, Values, Entries, and (Key, Value) arguments
125 < * and/or return values. Because the elements of a ConcurrentHashMap
126 < * are not ordered in any particular way, and may be processed in
127 < * different orders in different parallel executions, the correctness
128 < * of supplied functions should not depend on any ordering, or on any
129 < * other objects or values that may transiently change while
130 < * computation is in progress; and except for forEach actions, should
131 < * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
132 < * objects do not support method {@code setValue}.
124 > * functions with keys, values, entries, and (key, value) pairs as
125 > * arguments and/or return values. Because the elements of a
126 > * ConcurrentHashMap are not ordered in any particular way, and may be
127 > * processed in different orders in different parallel executions, the
128 > * correctness of supplied functions should not depend on any
129 > * ordering, or on any other objects or values that may transiently
130 > * change while computation is in progress; and except for forEach
131 > * actions, should ideally be side-effect-free. Bulk operations on
132 > * {@link java.util.Map.Entry} objects do not support method {@code
133 > * setValue}.
134   *
135   * <ul>
136 < * <li> forEach: Perform a given action on each element.
136 > * <li>forEach: Performs a given action on each element.
137   * A variant form applies a given transformation on each element
138 < * before performing the action.</li>
138 > * before performing the action.
139   *
140 < * <li> search: Return the first available non-null result of
140 > * <li>search: Returns the first available non-null result of
141   * applying a given function on each element; skipping further
142 < * search when a result is found.</li>
142 > * search when a result is found.
143   *
144 < * <li> reduce: Accumulate each element.  The supplied reduction
144 > * <li>reduce: Accumulates each element.  The supplied reduction
145   * function cannot rely on ordering (more formally, it should be
146   * both associative and commutative).  There are five variants:
147   *
148   * <ul>
149   *
150 < * <li> Plain reductions. (There is not a form of this method for
150 > * <li>Plain reductions. (There is not a form of this method for
151   * (key, value) function arguments since there is no corresponding
152 < * return type.)</li>
152 > * return type.)
153   *
154 < * <li> Mapped reductions that accumulate the results of a given
155 < * function applied to each element.</li>
154 > * <li>Mapped reductions that accumulate the results of a given
155 > * function applied to each element.
156   *
157 < * <li> Reductions to scalar doubles, longs, and ints, using a
158 < * given basis value.</li>
157 > * <li>Reductions to scalar doubles, longs, and ints, using a
158 > * given basis value.
159   *
160   * </ul>
163 * </li>
161   * </ul>
162   *
163   * <p>These bulk operations accept a {@code parallelismThreshold}
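
    A rough sketch of the three kinds of bulk operations listed above, using
    the parallelismThreshold argument described in that paragraph (the map
    contents and threshold value are arbitrary choices for illustration):

        import java.util.concurrent.ConcurrentHashMap;

        public class BulkOpsSketch {
            public static void main(String[] args) {
                ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
                map.put("a", 1); map.put("b", 2); map.put("c", 3);

                long threshold = Long.MAX_VALUE; // so large that the operations run sequentially

                // forEach: perform an action on each (key, value) pair.
                map.forEach(threshold, (k, v) -> System.out.println(k + "=" + v));

                // search: the first non-null function result wins; further search is skipped.
                String firstBig = map.search(threshold, (k, v) -> v > 1 ? k : null);

                // reduce to a scalar long, using a basis value of 0.
                long sum = map.reduceValuesToLong(threshold, Integer::longValue, 0L, Long::sum);

                System.out.println(firstBig + ", sum=" + sum);
            }
        }
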
# Line 235 | Line 232 | import java.util.stream.Stream;
232   * @param <K> the type of keys maintained by this map
233   * @param <V> the type of mapped values
234   */
235 < public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
235 > public class ConcurrentHashMap<K,V> extends AbstractMap<K,V>
236 >    implements ConcurrentMap<K,V>, Serializable {
237      private static final long serialVersionUID = 7249069246763182397L;
238  
239      /*
# Line 262 | Line 260 | public class ConcurrentHashMap<K,V> impl
260       * because they have negative hash fields and null key and value
261       * fields. (These special nodes are either uncommon or transient,
262       * so the impact of carrying around some unused fields is
263 <     * insignficant.)
263 >     * insignificant.)
264       *
265       * The table is lazily initialized to a power-of-two size upon the
266       * first insertion.  Each bin in the table normally contains a
# Line 343 | Line 341 | public class ConcurrentHashMap<K,V> impl
341       * The table is resized when occupancy exceeds a percentage
342       * threshold (nominally, 0.75, but see below).  Any thread
343       * noticing an overfull bin may assist in resizing after the
344 <     * initiating thread allocates and sets up the replacement
345 <     * array. However, rather than stalling, these other threads may
346 <     * proceed with insertions etc.  The use of TreeBins shields us
347 <     * from the worst case effects of overfilling while resizes are in
344 >     * initiating thread allocates and sets up the replacement array.
345 >     * However, rather than stalling, these other threads may proceed
346 >     * with insertions etc.  The use of TreeBins shields us from the
347 >     * worst case effects of overfilling while resizes are in
348       * progress.  Resizing proceeds by transferring bins, one by one,
349 <     * from the table to the next table. To enable concurrency, the
350 <     * next table must be (incrementally) prefilled with place-holders
351 <     * serving as reverse forwarders to the old table.  Because we are
349 >     * from the table to the next table. However, threads claim small
350 >     * blocks of indices to transfer (via field transferIndex) before
351 >     * doing so, reducing contention.  A generation stamp in field
352 >     * sizeCtl ensures that resizings do not overlap. Because we are
353       * using power-of-two expansion, the elements from each bin must
354       * either stay at same index, or move with a power of two
355       * offset. We eliminate unnecessary node creation by catching
# Line 371 | Line 370 | public class ConcurrentHashMap<K,V> impl
370       * locks, average aggregate waits become shorter as resizing
371       * progresses.  The transfer operation must also ensure that all
372       * accessible bins in both the old and new table are usable by any
373 <     * traversal.  This is arranged by proceeding from the last bin
374 <     * (table.length - 1) up towards the first.  Upon seeing a
375 <     * forwarding node, traversals (see class Traverser) arrange to
376 <     * move to the new table without revisiting nodes.  However, to
377 <     * ensure that no intervening nodes are skipped, bin splitting can
378 <     * only begin after the associated reverse-forwarders are in
379 <     * place.
373 >     * traversal.  This is arranged in part by proceeding from the
374 >     * last bin (table.length - 1) up towards the first.  Upon seeing
375 >     * a forwarding node, traversals (see class Traverser) arrange to
376 >     * move to the new table without revisiting nodes.  To ensure that
377 >     * no intervening nodes are skipped even when moved out of order,
378 >     * a stack (see class TableStack) is created on first encounter of
379 >     * a forwarding node during a traversal, to maintain its place if
380 >     * later processing the current table. The need for these
381 >     * save/restore mechanics is relatively rare, but when one
382 >     * forwarding node is encountered, typically many more will be.
383 >     * So Traversers use a simple caching scheme to avoid creating so
384 >     * many new TableStack nodes. (Thanks to Peter Levart for
385 >     * suggesting use of a stack here.)
386       *
387       * The traversal scheme also applies to partial traversals of
388       * ranges of bins (via an alternate Traverser constructor)
# Line 409 | Line 414 | public class ConcurrentHashMap<K,V> impl
414       * related operations (which is the main reason we cannot use
415       * existing collections such as TreeMaps). TreeBins contain
416       * Comparable elements, but may contain others, as well as
417 <     * elements that are Comparable but not necessarily Comparable
418 <     * for the same T, so we cannot invoke compareTo among them. To
419 <     * handle this, the tree is ordered primarily by hash value, then
420 <     * by Comparable.compareTo order if applicable.  On lookup at a
421 <     * node, if elements are not comparable or compare as 0 then both
422 <     * left and right children may need to be searched in the case of
423 <     * tied hash values. (This corresponds to the full list search
424 <     * that would be necessary if all elements were non-Comparable and
425 <     * had tied hashes.)  The red-black balancing code is updated from
426 <     * pre-jdk-collections
417 >     * elements that are Comparable but not necessarily Comparable for
418 >     * the same T, so we cannot invoke compareTo among them. To handle
419 >     * this, the tree is ordered primarily by hash value, then by
420 >     * Comparable.compareTo order if applicable.  On lookup at a node,
421 >     * if elements are not comparable or compare as 0 then both left
422 >     * and right children may need to be searched in the case of tied
423 >     * hash values. (This corresponds to the full list search that
424 >     * would be necessary if all elements were non-Comparable and had
425 >     * tied hashes.) On insertion, to keep a total ordering (or as
426 >     * close as is required here) across rebalancings, we compare
427 >     * classes and identityHashCodes as tie-breakers. The red-black
428 >     * balancing code is updated from pre-jdk-collections
429       * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
430       * based in turn on Cormen, Leiserson, and Rivest "Introduction to
431       * Algorithms" (CLR).
432       *
433       * TreeBins also require an additional locking mechanism.  While
434       * list traversal is always possible by readers even during
435 <     * updates, tree traversal is not, mainly beause of tree-rotations
435 >     * updates, tree traversal is not, mainly because of tree-rotations
436       * that may change the root node and/or its linkages.  TreeBins
437       * include a simple read-write lock mechanism parasitic on the
438       * main bin-synchronization strategy: Structural adjustments
# Line 441 | Line 448 | public class ConcurrentHashMap<K,V> impl
448       *
449       * Maintaining API and serialization compatibility with previous
450       * versions of this class introduces several oddities. Mainly: We
451 <     * leave untouched but unused constructor arguments refering to
451 >     * leave untouched but unused constructor arguments referring to
452       * concurrencyLevel. We accept a loadFactor constructor argument,
453       * but apply it only to initial table capacity (which is the only
454       * time that we can guarantee to honor it.) We also declare an
455       * unused "Segment" class that is instantiated in minimal form
456       * only when serializing.
457       *
458 +     * Also, solely for compatibility with previous versions of this
459 +     * class, it extends AbstractMap, even though all of its methods
460 +     * are overridden, so it is just useless baggage.
461 +     *
462       * This file is organized to make things a little easier to follow
463       * while reading than they might otherwise: First the main static
464       * declarations and utilities, then fields, then main public
# Line 528 | Line 539 | public class ConcurrentHashMap<K,V> impl
539       */
540      private static final int MIN_TRANSFER_STRIDE = 16;
541  
542 +    /**
543 +     * The number of bits used for generation stamp in sizeCtl.
544 +     * Must be at least 6 for 32bit arrays.
545 +     */
546 +    private static final int RESIZE_STAMP_BITS = 16;
547 +
548 +    /**
549 +     * The maximum number of threads that can help resize.
550 +     * Must fit in 32 - RESIZE_STAMP_BITS bits.
551 +     */
552 +    private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
553 +
554 +    /**
555 +     * The bit shift for recording size stamp in sizeCtl.
556 +     */
557 +    private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
558 +
559      /*
560       * Encodings for Node hash fields. See above for explanation.
561       */
562 <    static final int MOVED     = 0x8fffffff; // (-1) hash for forwarding nodes
563 <    static final int TREEBIN   = 0x80000000; // hash for heads of treea
564 <    static final int RESERVED  = 0x80000001; // hash for transient reservations
562 >    static final int MOVED     = -1; // hash for forwarding nodes
563 >    static final int TREEBIN   = -2; // hash for roots of trees
564 >    static final int RESERVED  = -3; // hash for transient reservations
565      static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
566  
567      /** Number of CPUS, to place bounds on some sizings */
# Line 543 | Line 571 | public class ConcurrentHashMap<K,V> impl
571      private static final ObjectStreamField[] serialPersistentFields = {
572          new ObjectStreamField("segments", Segment[].class),
573          new ObjectStreamField("segmentMask", Integer.TYPE),
574 <        new ObjectStreamField("segmentShift", Integer.TYPE)
574 >        new ObjectStreamField("segmentShift", Integer.TYPE),
575      };
576  
577      /* ---------------- Nodes -------------- */
# Line 552 | Line 580 | public class ConcurrentHashMap<K,V> impl
580       * Key-value entry.  This class is never exported out as a
581       * user-mutable Map.Entry (i.e., one supporting setValue; see
582       * MapEntry below), but can be used for read-only traversals used
583 <     * in bulk tasks.  Subclasses of Node with a negativehash field
583 >     * in bulk tasks.  Subclasses of Node with a negative hash field
584       * are special, and contain null keys and values (but are never
585       * exported).  Otherwise, keys and vals are never null.
586       */
# Line 560 | Line 588 | public class ConcurrentHashMap<K,V> impl
588          final int hash;
589          final K key;
590          volatile V val;
591 <        Node<K,V> next;
591 >        volatile Node<K,V> next;
592  
593          Node(int hash, K key, V val, Node<K,V> next) {
594              this.hash = hash;
# Line 569 | Line 597 | public class ConcurrentHashMap<K,V> impl
597              this.next = next;
598          }
599  
600 <        public final K getKey()       { return key; }
601 <        public final V getValue()     { return val; }
602 <        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
603 <        public final String toString(){ return key + "=" + val; }
600 >        public final K getKey()     { return key; }
601 >        public final V getValue()   { return val; }
602 >        public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
603 >        public final String toString() {
604 >            return Helpers.mapEntryToString(key, val);
605 >        }
606          public final V setValue(V value) {
607              throw new UnsupportedOperationException();
608          }
# Line 685 | Line 715 | public class ConcurrentHashMap<K,V> impl
715       * errors by users, these checks must operate on local variables,
716       * which accounts for some odd-looking inline assignments below.
717       * Note that calls to setTabAt always occur within locked regions,
718 <     * and so do not need full volatile semantics, but still require
719 <     * ordering to maintain concurrent readability.
718 >     * and so in principle require only release ordering, not
719 >     * full volatile semantics, but are currently coded as volatile
720 >     * writes to be conservative.
721       */
722  
723      @SuppressWarnings("unchecked")
# Line 700 | Line 731 | public class ConcurrentHashMap<K,V> impl
731      }
732  
733      static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
734 <        U.putOrderedObject(tab, ((long)i << ASHIFT) + ABASE, v);
734 >        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
735      }
736  
737      /* ---------------- Fields -------------- */
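
    The Unsafe-based accessors in the hunk above amount to volatile reads, CAS,
    and volatile writes on individual table slots. A rough functional analogue,
    for illustration only (the class itself uses Unsafe directly, and the Node
    type below is just a stand-in), can be expressed with AtomicReferenceArray:

        import java.util.concurrent.atomic.AtomicReferenceArray;

        class TabAccessSketch<K, V> {
            // Stand-in for the map's internal Node type; illustrative only.
            static final class Node<K, V> {
                final K key; volatile V val;
                Node(K key, V val) { this.key = key; this.val = val; }
            }

            final AtomicReferenceArray<Node<K, V>> tab = new AtomicReferenceArray<>(16);

            Node<K, V> tabAt(int i) { return tab.get(i); }                  // volatile read
            boolean casTabAt(int i, Node<K, V> c, Node<K, V> v) {
                return tab.compareAndSet(i, c, v);                          // atomic CAS of one slot
            }
            void setTabAt(int i, Node<K, V> v) { tab.set(i, v); }           // volatile write
        }
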
# Line 739 | Line 770 | public class ConcurrentHashMap<K,V> impl
770      private transient volatile int transferIndex;
771  
772      /**
742     * The least available table index to split while resizing.
743     */
744    private transient volatile int transferOrigin;
745
746    /**
773       * Spinlock (locked via CAS) used when resizing and/or creating CounterCells.
774       */
775      private transient volatile int cellsBusy;
# Line 1000 | Line 1026 | public class ConcurrentHashMap<K,V> impl
1026                                      p.val = value;
1027                              }
1028                          }
1029 +                        else if (f instanceof ReservationNode)
1030 +                            throw new IllegalStateException("Recursive update");
1031                      }
1032                  }
1033                  if (binCount != 0) {
# Line 1102 | Line 1130 | public class ConcurrentHashMap<K,V> impl
1130                                  }
1131                              }
1132                          }
1133 +                        else if (f instanceof ReservationNode)
1134 +                            throw new IllegalStateException("Recursive update");
1135                      }
1136                  }
1137                  if (validated) {
# Line 1162 | Line 1192 | public class ConcurrentHashMap<K,V> impl
1192       * operations.  It does not support the {@code add} or
1193       * {@code addAll} operations.
1194       *
1195 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1196 <     * that will never throw {@link ConcurrentModificationException},
1197 <     * and guarantees to traverse elements as they existed upon
1198 <     * construction of the iterator, and may (but is not guaranteed to)
1199 <     * reflect any modifications subsequent to construction.
1195 >     * <p>The view's iterators and spliterators are
1196 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1197 >     *
1198 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1199 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1200       *
1201       * @return the set view
1202       */
# Line 1185 | Line 1215 | public class ConcurrentHashMap<K,V> impl
1215       * {@code retainAll}, and {@code clear} operations.  It does not
1216       * support the {@code add} or {@code addAll} operations.
1217       *
1218 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1219 <     * that will never throw {@link ConcurrentModificationException},
1220 <     * and guarantees to traverse elements as they existed upon
1221 <     * construction of the iterator, and may (but is not guaranteed to)
1222 <     * reflect any modifications subsequent to construction.
1218 >     * <p>The view's iterators and spliterators are
1219 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1220 >     *
1221 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT}
1222 >     * and {@link Spliterator#NONNULL}.
1223       *
1224       * @return the collection view
1225       */
# Line 1207 | Line 1237 | public class ConcurrentHashMap<K,V> impl
1237       * {@code removeAll}, {@code retainAll}, and {@code clear}
1238       * operations.
1239       *
1240 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1241 <     * that will never throw {@link ConcurrentModificationException},
1242 <     * and guarantees to traverse elements as they existed upon
1243 <     * construction of the iterator, and may (but is not guaranteed to)
1244 <     * reflect any modifications subsequent to construction.
1240 >     * <p>The view's iterators and spliterators are
1241 >     * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
1242 >     *
1243 >     * <p>The view's {@code spliterator} reports {@link Spliterator#CONCURRENT},
1244 >     * {@link Spliterator#DISTINCT}, and {@link Spliterator#NONNULL}.
1245       *
1246       * @return the set view
1247       */
# Line 1309 | Line 1339 | public class ConcurrentHashMap<K,V> impl
1339  
1340      /**
1341       * Stripped-down version of helper class used in previous version,
1342 <     * declared for the sake of serialization compatibility
1342 >     * declared for the sake of serialization compatibility.
1343       */
1344      static class Segment<K,V> extends ReentrantLock implements Serializable {
1345          private static final long serialVersionUID = 2249069246763182397L;
# Line 1321 | Line 1351 | public class ConcurrentHashMap<K,V> impl
1351       * Saves the state of the {@code ConcurrentHashMap} instance to a
1352       * stream (i.e., serializes it).
1353       * @param s the stream
1354 +     * @throws java.io.IOException if an I/O error occurs
1355       * @serialData
1356       * the key (Object) and value (Object)
1357       * for each key-value mapping, followed by a null pair.
# Line 1338 | Line 1369 | public class ConcurrentHashMap<K,V> impl
1369          }
1370          int segmentShift = 32 - sshift;
1371          int segmentMask = ssize - 1;
1372 <        @SuppressWarnings("unchecked") Segment<K,V>[] segments = (Segment<K,V>[])
1372 >        @SuppressWarnings("unchecked")
1373 >        Segment<K,V>[] segments = (Segment<K,V>[])
1374              new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
1375          for (int i = 0; i < segments.length; ++i)
1376              segments[i] = new Segment<K,V>(LOAD_FACTOR);
1377 <        s.putFields().put("segments", segments);
1378 <        s.putFields().put("segmentShift", segmentShift);
1379 <        s.putFields().put("segmentMask", segmentMask);
1377 >        java.io.ObjectOutputStream.PutField streamFields = s.putFields();
1378 >        streamFields.put("segments", segments);
1379 >        streamFields.put("segmentShift", segmentShift);
1380 >        streamFields.put("segmentMask", segmentMask);
1381          s.writeFields();
1382  
1383          Node<K,V>[] t;
# Line 1363 | Line 1396 | public class ConcurrentHashMap<K,V> impl
1396      /**
1397       * Reconstitutes the instance from a stream (that is, deserializes it).
1398       * @param s the stream
1399 +     * @throws ClassNotFoundException if the class of a serialized object
1400 +     *         could not be found
1401 +     * @throws java.io.IOException if an I/O error occurs
1402       */
1403      private void readObject(java.io.ObjectInputStream s)
1404          throws java.io.IOException, ClassNotFoundException {
# Line 1378 | Line 1414 | public class ConcurrentHashMap<K,V> impl
1414          long size = 0L;
1415          Node<K,V> p = null;
1416          for (;;) {
1417 <            @SuppressWarnings("unchecked") K k = (K) s.readObject();
1418 <            @SuppressWarnings("unchecked") V v = (V) s.readObject();
1417 >            @SuppressWarnings("unchecked")
1418 >            K k = (K) s.readObject();
1419 >            @SuppressWarnings("unchecked")
1420 >            V v = (V) s.readObject();
1421              if (k != null && v != null) {
1422                  p = new Node<K,V>(spread(k.hashCode()), k, v, p);
1423                  ++size;
# Line 1397 | Line 1435 | public class ConcurrentHashMap<K,V> impl
1435                  int sz = (int)size;
1436                  n = tableSizeFor(sz + (sz >>> 1) + 1);
1437              }
1438 <            @SuppressWarnings({"rawtypes","unchecked"})
1439 <                Node<K,V>[] tab = (Node<K,V>[])new Node[n];
1438 >            @SuppressWarnings("unchecked")
1439 >            Node<K,V>[] tab = (Node<K,V>[])new Node<?,?>[n];
1440              int mask = n - 1;
1441              long added = 0L;
1442              while (p != null) {
# Line 1556 | Line 1594 | public class ConcurrentHashMap<K,V> impl
1594      }
1595  
1596      /**
1597 +     * Helper method for EntrySetView.removeIf.
1598 +     */
1599 +    boolean removeEntryIf(Predicate<? super Entry<K,V>> function) {
1600 +        if (function == null) throw new NullPointerException();
1601 +        Node<K,V>[] t;
1602 +        boolean removed = false;
1603 +        if ((t = table) != null) {
1604 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1605 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1606 +                K k = p.key;
1607 +                V v = p.val;
1608 +                Map.Entry<K,V> e = new AbstractMap.SimpleImmutableEntry<>(k, v);
1609 +                if (function.test(e) && replaceNode(k, null, v) != null)
1610 +                    removed = true;
1611 +            }
1612 +        }
1613 +        return removed;
1614 +    }
1615 +
1616 +    /**
1617 +     * Helper method for ValuesView.removeIf.
1618 +     */
1619 +    boolean removeValueIf(Predicate<? super V> function) {
1620 +        if (function == null) throw new NullPointerException();
1621 +        Node<K,V>[] t;
1622 +        boolean removed = false;
1623 +        if ((t = table) != null) {
1624 +            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
1625 +            for (Node<K,V> p; (p = it.advance()) != null; ) {
1626 +                K k = p.key;
1627 +                V v = p.val;
1628 +                if (function.test(v) && replaceNode(k, null, v) != null)
1629 +                    removed = true;
1630 +            }
1631 +        }
1632 +        return removed;
1633 +    }
1634 +
1635 +    /**
1636       * If the specified key is not already associated with a value,
1637       * attempts to compute its value using the given mapping function
1638       * and enters it into this map unless {@code null}.  The entire
# Line 1613 | Line 1690 | public class ConcurrentHashMap<K,V> impl
1690                          if (fh >= 0) {
1691                              binCount = 1;
1692                              for (Node<K,V> e = f;; ++binCount) {
1693 <                                K ek; V ev;
1693 >                                K ek;
1694                                  if (e.hash == h &&
1695                                      ((ek = e.key) == key ||
1696                                       (ek != null && key.equals(ek)))) {
# Line 1623 | Line 1700 | public class ConcurrentHashMap<K,V> impl
1700                                  Node<K,V> pred = e;
1701                                  if ((e = e.next) == null) {
1702                                      if ((val = mappingFunction.apply(key)) != null) {
1703 +                                        if (pred.next != null)
1704 +                                            throw new IllegalStateException("Recursive update");
1705                                          added = true;
1706                                          pred.next = new Node<K,V>(h, key, val, null);
1707                                      }
# Line 1642 | Line 1721 | public class ConcurrentHashMap<K,V> impl
1721                                  t.putTreeVal(h, key, val);
1722                              }
1723                          }
1724 +                        else if (f instanceof ReservationNode)
1725 +                            throw new IllegalStateException("Recursive update");
1726                      }
1727                  }
1728                  if (binCount != 0) {
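
    With the ReservationNode checks added in the hunk above, a mapping function
    that re-enters the map on the same key fails fast instead of corrupting the
    bin. A hypothetical sketch of a call that this revision rejects:

        import java.util.concurrent.ConcurrentHashMap;

        public class RecursiveUpdateSketch {
            public static void main(String[] args) {
                ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
                try {
                    // The mapping function re-enters computeIfAbsent for the same key,
                    // so it encounters the ReservationNode placed by the outer call.
                    map.computeIfAbsent("k", x -> map.computeIfAbsent("k", y -> 1));
                } catch (IllegalStateException e) {
                    System.out.println("rejected: " + e.getMessage()); // "Recursive update"
                }
            }
        }
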
# Line 1737 | Line 1818 | public class ConcurrentHashMap<K,V> impl
1818                                  }
1819                              }
1820                          }
1821 +                        else if (f instanceof ReservationNode)
1822 +                            throw new IllegalStateException("Recursive update");
1823                      }
1824                  }
1825                  if (binCount != 0)
# Line 1828 | Line 1911 | public class ConcurrentHashMap<K,V> impl
1911                                  if ((e = e.next) == null) {
1912                                      val = remappingFunction.apply(key, null);
1913                                      if (val != null) {
1914 +                                        if (pred.next != null)
1915 +                                            throw new IllegalStateException("Recursive update");
1916                                          delta = 1;
1917                                          pred.next =
1918                                              new Node<K,V>(h, key, val, null);
# Line 1860 | Line 1945 | public class ConcurrentHashMap<K,V> impl
1945                                      setTabAt(tab, i, untreeify(t.first));
1946                              }
1947                          }
1948 +                        else if (f instanceof ReservationNode)
1949 +                            throw new IllegalStateException("Recursive update");
1950                      }
1951                  }
1952                  if (binCount != 0) {
# Line 1969 | Line 2056 | public class ConcurrentHashMap<K,V> impl
2056                                      setTabAt(tab, i, untreeify(t.first));
2057                              }
2058                          }
2059 +                        else if (f instanceof ReservationNode)
2060 +                            throw new IllegalStateException("Recursive update");
2061                      }
2062                  }
2063                  if (binCount != 0) {
# Line 1986 | Line 2075 | public class ConcurrentHashMap<K,V> impl
2075      // Hashtable legacy methods
2076  
2077      /**
2078 <     * Legacy method testing if some key maps into the specified value
2079 <     * in this table.  This method is identical in functionality to
2078 >     * Tests if some key maps into the specified value in this table.
2079 >     *
2080 >     * <p>Note that this method is identical in functionality to
2081       * {@link #containsValue(Object)}, and exists solely to ensure
2082       * full compatibility with class {@link java.util.Hashtable},
2083       * which supported this method prior to introduction of the
2084 <     * Java Collections framework.
2084 >     * Java Collections Framework.
2085       *
2086       * @param  value a value to search for
2087       * @return {@code true} if and only if some key maps to the
# Line 2000 | Line 2090 | public class ConcurrentHashMap<K,V> impl
2090       *         {@code false} otherwise
2091       * @throws NullPointerException if the specified value is null
2092       */
2093 <    @Deprecated public boolean contains(Object value) {
2093 >    public boolean contains(Object value) {
2094          return containsValue(value);
2095      }
2096  
# Line 2049 | Line 2139 | public class ConcurrentHashMap<K,V> impl
2139       * Creates a new {@link Set} backed by a ConcurrentHashMap
2140       * from the given type to {@code Boolean.TRUE}.
2141       *
2142 +     * @param <K> the element type of the returned set
2143       * @return the new set
2144       * @since 1.8
2145       */
# Line 2063 | Line 2154 | public class ConcurrentHashMap<K,V> impl
2154       *
2155       * @param initialCapacity The implementation performs internal
2156       * sizing to accommodate this many elements.
2157 +     * @param <K> the element type of the returned set
2158 +     * @return the new set
2159       * @throws IllegalArgumentException if the initial capacity of
2160       * elements is negative
2068     * @return the new set
2161       * @since 1.8
2162       */
2163      public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
# Line 2103 | Line 2195 | public class ConcurrentHashMap<K,V> impl
2195          }
2196  
2197          Node<K,V> find(int h, Object k) {
2198 <            Node<K,V> e; int n;
2199 <            Node<K,V>[] tab = nextTable;
2200 <            if (k != null && tab != null && (n = tab.length) > 0 &&
2201 <                (e = tabAt(tab, (n - 1) & h)) != null) {
2202 <                do {
2198 >            // loop to avoid arbitrarily deep recursion on forwarding nodes
2199 >            outer: for (Node<K,V>[] tab = nextTable;;) {
2200 >                Node<K,V> e; int n;
2201 >                if (k == null || tab == null || (n = tab.length) == 0 ||
2202 >                    (e = tabAt(tab, (n - 1) & h)) == null)
2203 >                    return null;
2204 >                for (;;) {
2205                      int eh; K ek;
2206                      if ((eh = e.hash) == h &&
2207                          ((ek = e.key) == k || (ek != null && k.equals(ek))))
2208                          return e;
2209 <                    if (eh < 0)
2210 <                        return e.find(h, k);
2211 <                } while ((e = e.next) != null);
2209 >                    if (eh < 0) {
2210 >                        if (e instanceof ForwardingNode) {
2211 >                            tab = ((ForwardingNode<K,V>)e).nextTable;
2212 >                            continue outer;
2213 >                        }
2214 >                        else
2215 >                            return e.find(h, k);
2216 >                    }
2217 >                    if ((e = e.next) == null)
2218 >                        return null;
2219 >                }
2220              }
2119            return null;
2221          }
2222      }
2223  
2224      /**
2225 <     * A place-holder node used in computeIfAbsent and compute
2225 >     * A place-holder node used in computeIfAbsent and compute.
2226       */
2227      static final class ReservationNode<K,V> extends Node<K,V> {
2228          ReservationNode() {
# Line 2136 | Line 2237 | public class ConcurrentHashMap<K,V> impl
2237      /* ---------------- Table Initialization and Resizing -------------- */
2238  
2239      /**
2240 +     * Returns the stamp bits for resizing a table of size n.
2241 +     * Must be negative when shifted left by RESIZE_STAMP_SHIFT.
2242 +     */
2243 +    static final int resizeStamp(int n) {
2244 +        return Integer.numberOfLeadingZeros(n) | (1 << (RESIZE_STAMP_BITS - 1));
2245 +    }
2246 +
2247 +    /**
2248       * Initializes table, using the size recorded in sizeCtl.
2249       */
2250      private final Node<K,V>[] initTable() {
# Line 2147 | Line 2256 | public class ConcurrentHashMap<K,V> impl
2256                  try {
2257                      if ((tab = table) == null || tab.length == 0) {
2258                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2259 <                        @SuppressWarnings({"rawtypes","unchecked"})
2260 <                            Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2259 >                        @SuppressWarnings("unchecked")
2260 >                        Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2261                          table = tab = nt;
2262                          sc = n - (n >>> 2);
2263                      }
# Line 2189 | Line 2298 | public class ConcurrentHashMap<K,V> impl
2298              s = sumCount();
2299          }
2300          if (check >= 0) {
2301 <            Node<K,V>[] tab, nt; int sc;
2301 >            Node<K,V>[] tab, nt; int n, sc;
2302              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
2303 <                   tab.length < MAXIMUM_CAPACITY) {
2303 >                   (n = tab.length) < MAXIMUM_CAPACITY) {
2304 >                int rs = resizeStamp(n);
2305                  if (sc < 0) {
2306 <                    if (sc == -1 || transferIndex <= transferOrigin ||
2307 <                        (nt = nextTable) == null)
2306 >                    if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2307 >                        sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
2308 >                        transferIndex <= 0)
2309                          break;
2310 <                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2310 >                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1))
2311                          transfer(tab, nt);
2312                  }
2313 <                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
2313 >                else if (U.compareAndSwapInt(this, SIZECTL, sc,
2314 >                                             (rs << RESIZE_STAMP_SHIFT) + 2))
2315                      transfer(tab, null);
2316                  s = sumCount();
2317              }
# Line 2211 | Line 2323 | public class ConcurrentHashMap<K,V> impl
2323       */
2324      final Node<K,V>[] helpTransfer(Node<K,V>[] tab, Node<K,V> f) {
2325          Node<K,V>[] nextTab; int sc;
2326 <        if ((f instanceof ForwardingNode) &&
2326 >        if (tab != null && (f instanceof ForwardingNode) &&
2327              (nextTab = ((ForwardingNode<K,V>)f).nextTable) != null) {
2328 <            if (nextTab == nextTable && tab == table &&
2329 <                transferIndex > transferOrigin && (sc = sizeCtl) < -1 &&
2330 <                U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
2331 <                transfer(tab, nextTab);
2328 >            int rs = resizeStamp(tab.length);
2329 >            while (nextTab == nextTable && table == tab &&
2330 >                   (sc = sizeCtl) < 0) {
2331 >                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
2332 >                    sc == rs + MAX_RESIZERS || transferIndex <= 0)
2333 >                    break;
2334 >                if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) {
2335 >                    transfer(tab, nextTab);
2336 >                    break;
2337 >                }
2338 >            }
2339              return nextTab;
2340          }
2341          return table;
# Line 2238 | Line 2357 | public class ConcurrentHashMap<K,V> impl
2357                  if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
2358                      try {
2359                          if (table == tab) {
2360 <                            @SuppressWarnings({"rawtypes","unchecked"})
2361 <                                Node<K,V>[] nt = (Node<K,V>[])new Node[n];
2360 >                            @SuppressWarnings("unchecked")
2361 >                            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
2362                              table = nt;
2363                              sc = n - (n >>> 2);
2364                          }
# Line 2250 | Line 2369 | public class ConcurrentHashMap<K,V> impl
2369              }
2370              else if (c <= sc || n >= MAXIMUM_CAPACITY)
2371                  break;
2372 <            else if (tab == table &&
2373 <                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
2374 <                transfer(tab, null);
2372 >            else if (tab == table) {
2373 >                int rs = resizeStamp(n);
2374 >                if (U.compareAndSwapInt(this, SIZECTL, sc,
2375 >                                        (rs << RESIZE_STAMP_SHIFT) + 2))
2376 >                    transfer(tab, null);
2377 >            }
2378          }
2379      }
2380  
# Line 2266 | Line 2388 | public class ConcurrentHashMap<K,V> impl
2388              stride = MIN_TRANSFER_STRIDE; // subdivide range
2389          if (nextTab == null) {            // initiating
2390              try {
2391 <                @SuppressWarnings({"rawtypes","unchecked"})
2392 <                    Node<K,V>[] nt = (Node<K,V>[])new Node[n << 1];
2391 >                @SuppressWarnings("unchecked")
2392 >                Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
2393                  nextTab = nt;
2394              } catch (Throwable ex) {      // try to cope with OOME
2395                  sizeCtl = Integer.MAX_VALUE;
2396                  return;
2397              }
2398              nextTable = nextTab;
2277            transferOrigin = n;
2399              transferIndex = n;
2279            ForwardingNode<K,V> rev = new ForwardingNode<K,V>(tab);
2280            for (int k = n; k > 0;) {    // progressively reveal ready slots
2281                int nextk = (k > stride) ? k - stride : 0;
2282                for (int m = nextk; m < k; ++m)
2283                    nextTab[m] = rev;
2284                for (int m = n + nextk; m < n + k; ++m)
2285                    nextTab[m] = rev;
2286                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
2287            }
2400          }
2401          int nextn = nextTab.length;
2402          ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
2403          boolean advance = true;
2404 +        boolean finishing = false; // to ensure sweep before committing nextTab
2405          for (int i = 0, bound = 0;;) {
2406 <            int nextIndex, nextBound, fh; Node<K,V> f;
2406 >            Node<K,V> f; int fh;
2407              while (advance) {
2408 <                if (--i >= bound)
2408 >                int nextIndex, nextBound;
2409 >                if (--i >= bound || finishing)
2410                      advance = false;
2411 <                else if ((nextIndex = transferIndex) <= transferOrigin) {
2411 >                else if ((nextIndex = transferIndex) <= 0) {
2412                      i = -1;
2413                      advance = false;
2414                  }
# Line 2308 | Line 2422 | public class ConcurrentHashMap<K,V> impl
2422                  }
2423              }
2424              if (i < 0 || i >= n || i + n >= nextn) {
2425 <                for (int sc;;) {
2426 <                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
2427 <                        if (sc == -1) {
2428 <                            nextTable = null;
2429 <                            table = nextTab;
2430 <                            sizeCtl = (n << 1) - (n >>> 1);
2317 <                        }
2318 <                        return;
2319 <                    }
2425 >                int sc;
2426 >                if (finishing) {
2427 >                    nextTable = null;
2428 >                    table = nextTab;
2429 >                    sizeCtl = (n << 1) - (n >>> 1);
2430 >                    return;
2431                  }
2432 <            }
2433 <            else if ((f = tabAt(tab, i)) == null) {
2434 <                if (casTabAt(tab, i, null, fwd)) {
2435 <                    setTabAt(nextTab, i, null);
2436 <                    setTabAt(nextTab, i + n, null);
2326 <                    advance = true;
2432 >                if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
2433 >                    if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
2434 >                        return;
2435 >                    finishing = advance = true;
2436 >                    i = n; // recheck before commit
2437                  }
2438              }
2439 +            else if ((f = tabAt(tab, i)) == null)
2440 +                advance = casTabAt(tab, i, null, fwd);
2441              else if ((fh = f.hash) == MOVED)
2442                  advance = true; // already processed
2443              else {
# Line 2357 | Line 2469 | public class ConcurrentHashMap<K,V> impl
2469                                  else
2470                                      hn = new Node<K,V>(ph, pk, pv, hn);
2471                              }
2472 +                            setTabAt(nextTab, i, ln);
2473 +                            setTabAt(nextTab, i + n, hn);
2474 +                            setTabAt(tab, i, fwd);
2475 +                            advance = true;
2476                          }
2477                          else if (f instanceof TreeBin) {
2478                              TreeBin<K,V> t = (TreeBin<K,V>)f;
# Line 2388 | Line 2504 | public class ConcurrentHashMap<K,V> impl
2504                                  (hc != 0) ? new TreeBin<K,V>(lo) : t;
2505                              hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
2506                                  (lc != 0) ? new TreeBin<K,V>(hi) : t;
2507 +                            setTabAt(nextTab, i, ln);
2508 +                            setTabAt(nextTab, i + n, hn);
2509 +                            setTabAt(tab, i, fwd);
2510 +                            advance = true;
2511                          }
2392                        else
2393                            ln = hn = null;
2394                        setTabAt(nextTab, i, ln);
2395                        setTabAt(nextTab, i + n, hn);
2396                        setTabAt(tab, i, fwd);
2397                        advance = true;
2512                      }
2513                  }
2514              }
# Line 2513 | Line 2627 | public class ConcurrentHashMap<K,V> impl
2627       * too small, in which case resizes instead.
2628       */
2629      private final void treeifyBin(Node<K,V>[] tab, int index) {
2630 <        Node<K,V> b; int n, sc;
2630 >        Node<K,V> b; int n;
2631          if (tab != null) {
2632 <            if ((n = tab.length) < MIN_TREEIFY_CAPACITY) {
2633 <                if (tab == table && (sc = sizeCtl) >= 0 &&
2634 <                    U.compareAndSwapInt(this, SIZECTL, sc, -2))
2521 <                    transfer(tab, null);
2522 <            }
2523 <            else if ((b = tabAt(tab, index)) != null) {
2632 >            if ((n = tab.length) < MIN_TREEIFY_CAPACITY)
2633 >                tryPresize(n << 1);
2634 >            else if ((b = tabAt(tab, index)) != null && b.hash >= 0) {
2635                  synchronized (b) {
2636                      if (tabAt(tab, index) == b) {
2637                          TreeNode<K,V> hd = null, tl = null;
# Line 2542 | Line 2653 | public class ConcurrentHashMap<K,V> impl
2653      }
2654  
2655      /**
2656 <     * Returns a list on non-TreeNodes replacing those in given list
2656 >     * Returns a list on non-TreeNodes replacing those in given list.
2657       */
2658      static <K,V> Node<K,V> untreeify(Node<K,V> b) {
2659          Node<K,V> hd = null, tl = null;
# Line 2560 | Line 2671 | public class ConcurrentHashMap<K,V> impl
2671      /* ---------------- TreeNodes -------------- */
2672  
2673      /**
2674 <     * Nodes for use in TreeBins
2674 >     * Nodes for use in TreeBins.
2675       */
2676      static final class TreeNode<K,V> extends Node<K,V> {
2677          TreeNode<K,V> parent;  // red-black tree links
# Line 2586 | Line 2697 | public class ConcurrentHashMap<K,V> impl
2697          final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
2698              if (k != null) {
2699                  TreeNode<K,V> p = this;
2700 <                do  {
2700 >                do {
2701                      int ph, dir; K pk; TreeNode<K,V> q;
2702                      TreeNode<K,V> pl = p.left, pr = p.right;
2703                      if ((ph = p.hash) > h)
# Line 2595 | Line 2706 | public class ConcurrentHashMap<K,V> impl
2706                          p = pr;
2707                      else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
2708                          return p;
2709 <                    else if (pl == null && pr == null)
2710 <                        break;
2709 >                    else if (pl == null)
2710 >                        p = pr;
2711 >                    else if (pr == null)
2712 >                        p = pl;
2713                      else if ((kc != null ||
2714                                (kc = comparableClassFor(k)) != null) &&
2715                               (dir = compareComparables(kc, k, pk)) != 0)
2716                          p = (dir < 0) ? pl : pr;
2717 <                    else if (pl == null)
2605 <                        p = pr;
2606 <                    else if (pr == null ||
2607 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2608 <                        p = pl;
2609 <                    else
2717 >                    else if ((q = pr.findTreeNode(h, k, kc)) != null)
2718                          return q;
2719 +                    else
2720 +                        p = pl;
2721                  } while (p != null);
2722              }
2723              return null;
# Line 2634 | Line 2744 | public class ConcurrentHashMap<K,V> impl
2744          static final int READER = 4; // increment value for setting read lock
2745  
2746          /**
2747 +         * Tie-breaking utility for ordering insertions when equal
2748 +         * hashCodes and non-comparable. We don't require a total
2749 +         * order, just a consistent insertion rule to maintain
2750 +         * equivalence across rebalancings. Tie-breaking further than
2751 +         * necessary simplifies testing a bit.
2752 +         */
2753 +        static int tieBreakOrder(Object a, Object b) {
2754 +            int d;
2755 +            if (a == null || b == null ||
2756 +                (d = a.getClass().getName().
2757 +                 compareTo(b.getClass().getName())) == 0)
2758 +                d = (System.identityHashCode(a) <= System.identityHashCode(b) ?
2759 +                     -1 : 1);
2760 +            return d;
2761 +        }
2762 +
2763 +        /**
2764           * Creates bin with initial set of nodes headed by b.
2765           */
2766          TreeBin(TreeNode<K,V> b) {
# Line 2649 | Line 2776 | public class ConcurrentHashMap<K,V> impl
2776                      r = x;
2777                  }
2778                  else {
2779 <                    Object key = x.key;
2780 <                    int hash = x.hash;
2779 >                    K k = x.key;
2780 >                    int h = x.hash;
2781                      Class<?> kc = null;
2782                      for (TreeNode<K,V> p = r;;) {
2783                          int dir, ph;
2784 <                        if ((ph = p.hash) > hash)
2784 >                        K pk = p.key;
2785 >                        if ((ph = p.hash) > h)
2786                              dir = -1;
2787 <                        else if (ph < hash)
2787 >                        else if (ph < h)
2788                              dir = 1;
2789 <                        else if ((kc != null ||
2790 <                                  (kc = comparableClassFor(key)) != null))
2791 <                            dir = compareComparables(kc, key, p.key);
2792 <                        else
2665 <                            dir = 0;
2789 >                        else if ((kc == null &&
2790 >                                  (kc = comparableClassFor(k)) == null) ||
2791 >                                 (dir = compareComparables(kc, k, pk)) == 0)
2792 >                            dir = tieBreakOrder(k, pk);
2793                          TreeNode<K,V> xp = p;
2794                          if ((p = (dir <= 0) ? p.left : p.right) == null) {
2795                              x.parent = xp;
# Line 2677 | Line 2804 | public class ConcurrentHashMap<K,V> impl
2804                  }
2805              }
2806              this.root = r;
2807 +            assert checkInvariants(root);
2808          }
2809  
2810          /**
2811 <         * Acquires write lock for tree restructuring
2811 >         * Acquires write lock for tree restructuring.
2812           */
2813          private final void lockRoot() {
2814              if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
# Line 2688 | Line 2816 | public class ConcurrentHashMap<K,V> impl
2816          }
2817  
2818          /**
2819 <         * Releases write lock for tree restructuring
2819 >         * Releases write lock for tree restructuring.
2820           */
2821          private final void unlockRoot() {
2822              lockState = 0;
2823          }
2824  
2825          /**
2826 <         * Possibly blocks awaiting root lock
2826 >         * Possibly blocks awaiting root lock.
2827           */
2828          private final void contendedLock() {
2829              boolean waiting = false;
2830              for (int s;;) {
2831 <                if (((s = lockState) & WRITER) == 0) {
2831 >                if (((s = lockState) & ~WAITER) == 0) {
2832                      if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
2833                          if (waiting)
2834                              waiter = null;
2835                          return;
2836                      }
2837                  }
2838 <                else if ((s | WAITER) == 0) {
2838 >                else if ((s & WAITER) == 0) {
2839                      if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
2840                          waiting = true;
2841                          waiter = Thread.currentThread();
# Line 2720 | Line 2848 | public class ConcurrentHashMap<K,V> impl
2848  
2849          /**
2850           * Returns matching node or null if none. Tries to search
2851 <         * using tree compareisons from root, but continues linear
2851 >         * using tree comparisons from root, but continues linear
2852           * search when lock not available.
2853           */
2854          final Node<K,V> find(int h, Object k) {
2855              if (k != null) {
2856 <                for (Node<K,V> e = first; e != null; e = e.next) {
2856 >                for (Node<K,V> e = first; e != null; ) {
2857                      int s; K ek;
2858                      if (((s = lockState) & (WAITER|WRITER)) != 0) {
2859                          if (e.hash == h &&
2860                              ((ek = e.key) == k || (ek != null && k.equals(ek))))
2861                              return e;
2862 +                        e = e.next;
2863                      }
2864                      else if (U.compareAndSwapInt(this, LOCKSTATE, s,
2865                                                   s + READER)) {
# Line 2757 | Line 2886 | public class ConcurrentHashMap<K,V> impl
2886           */
2887          final TreeNode<K,V> putTreeVal(int h, K k, V v) {
2888              Class<?> kc = null;
2889 +            boolean searched = false;
2890              for (TreeNode<K,V> p = root;;) {
2891 <                int dir, ph; K pk; TreeNode<K,V> q, pr;
2891 >                int dir, ph; K pk;
2892                  if (p == null) {
2893                      first = root = new TreeNode<K,V>(h, k, v, null, null);
2894                      break;
# Line 2772 | Line 2902 | public class ConcurrentHashMap<K,V> impl
2902                  else if ((kc == null &&
2903                            (kc = comparableClassFor(k)) == null) ||
2904                           (dir = compareComparables(kc, k, pk)) == 0) {
2905 <                    if (p.left == null)
2906 <                        dir = 1;
2907 <                    else if ((pr = p.right) == null ||
2908 <                             (q = pr.findTreeNode(h, k, kc)) == null)
2909 <                        dir = -1;
2910 <                    else
2911 <                        return q;
2905 >                    if (!searched) {
2906 >                        TreeNode<K,V> q, ch;
2907 >                        searched = true;
2908 >                        if (((ch = p.left) != null &&
2909 >                             (q = ch.findTreeNode(h, k, kc)) != null) ||
2910 >                            ((ch = p.right) != null &&
2911 >                             (q = ch.findTreeNode(h, k, kc)) != null))
2912 >                            return q;
2913 >                    }
2914 >                    dir = tieBreakOrder(k, pk);
2915                  }
2916 +
2917                  TreeNode<K,V> xp = p;
2918 <                if ((p = (dir < 0) ? p.left : p.right) == null) {
2918 >                if ((p = (dir <= 0) ? p.left : p.right) == null) {
2919                      TreeNode<K,V> x, f = first;
2920                      first = x = new TreeNode<K,V>(h, k, v, f, xp);
2921                      if (f != null)
2922                          f.prev = x;
2923 <                    if (dir < 0)
2923 >                    if (dir <= 0)
2924                          xp.left = x;
2925                      else
2926                          xp.right = x;
# Line 2815 | Line 2949 | public class ConcurrentHashMap<K,V> impl
2949           * that are accessible independently of lock. So instead we
2950           * swap the tree linkages.
2951           *
2952 <         * @return true if now too small so should be untreeified.
2952 >         * @return true if now too small, so should be untreeified
2953           */
2954          final boolean removeTreeNode(TreeNode<K,V> p) {
2955              TreeNode<K,V> next = (TreeNode<K,V>)p.next;
# Line 3009 | Line 3143 | public class ConcurrentHashMap<K,V> impl
3143  
3144          static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root,
3145                                                     TreeNode<K,V> x) {
3146 <            for (TreeNode<K,V> xp, xpl, xpr;;)  {
3146 >            for (TreeNode<K,V> xp, xpl, xpr;;) {
3147                  if (x == null || x == root)
3148                      return root;
3149                  else if ((xp = x.parent) == null) {
# Line 3100 | Line 3234 | public class ConcurrentHashMap<K,V> impl
3234          }
3235  
3236          /**
3237 <         * Recursive invariant check
3237 >         * Checks invariants recursively for the tree of Nodes rooted at t.
3238           */
3239          static <K,V> boolean checkInvariants(TreeNode<K,V> t) {
3240              TreeNode<K,V> tp = t.parent, tl = t.left, tr = t.right,
# Line 3124 | Line 3258 | public class ConcurrentHashMap<K,V> impl
3258              return true;
3259          }
3260  
3261 <        private static final sun.misc.Unsafe U;
3261 >        private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe();
3262          private static final long LOCKSTATE;
3263          static {
3264              try {
3131                U = sun.misc.Unsafe.getUnsafe();
3132                Class<?> k = TreeBin.class;
3265                  LOCKSTATE = U.objectFieldOffset
3266 <                    (k.getDeclaredField("lockState"));
3267 <            } catch (Exception e) {
3266 >                    (TreeBin.class.getDeclaredField("lockState"));
3267 >            } catch (ReflectiveOperationException e) {
3268                  throw new Error(e);
3269              }
3270          }
# Line 3141 | Line 3273 | public class ConcurrentHashMap<K,V> impl
3273      /* ----------------Table Traversal -------------- */
3274  
3275      /**
3276 +     * Records the table, its length, and current traversal index for a
3277 +     * traverser that must process a region of a forwarded table before
3278 +     * proceeding with current table.
3279 +     */
3280 +    static final class TableStack<K,V> {
3281 +        int length;
3282 +        int index;
3283 +        Node<K,V>[] tab;
3284 +        TableStack<K,V> next;
3285 +    }
3286 +
3287 +    /**
3288       * Encapsulates traversal for methods such as containsValue; also
3289       * serves as a base class for other iterators and spliterators.
3290       *
# Line 3164 | Line 3308 | public class ConcurrentHashMap<K,V> impl
3308      static class Traverser<K,V> {
3309          Node<K,V>[] tab;        // current table; updated if resized
3310          Node<K,V> next;         // the next entry to use
3311 +        TableStack<K,V> stack, spare; // to save/restore on ForwardingNodes
3312          int index;              // index of bin to use next
3313          int baseIndex;          // current index of initial table
3314          int baseLimit;          // index bound for initial table
# Line 3185 | Line 3330 | public class ConcurrentHashMap<K,V> impl
3330              if ((e = next) != null)
3331                  e = e.next;
3332              for (;;) {
3333 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
3333 >                Node<K,V>[] t; int i, n;  // must use locals in checks
3334                  if (e != null)
3335                      return next = e;
3336                  if (baseIndex >= baseLimit || (t = tab) == null ||
3337                      (n = t.length) <= (i = index) || i < 0)
3338                      return next = null;
3339 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
3339 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
3340                      if (e instanceof ForwardingNode) {
3341                          tab = ((ForwardingNode<K,V>)e).nextTable;
3342                          e = null;
3343 +                        pushState(t, i, n);
3344                          continue;
3345                      }
3346                      else if (e instanceof TreeBin)
# Line 3202 | Line 3348 | public class ConcurrentHashMap<K,V> impl
3348                      else
3349                          e = null;
3350                  }
3351 <                if ((index += baseSize) >= n)
3352 <                    index = ++baseIndex;    // visit upper slots if present
3351 >                if (stack != null)
3352 >                    recoverState(n);
3353 >                else if ((index = i + baseSize) >= n)
3354 >                    index = ++baseIndex; // visit upper slots if present
3355 >            }
3356 >        }
3357 >
3358 >        /**
3359 >         * Saves traversal state upon encountering a forwarding node.
3360 >         */
3361 >        private void pushState(Node<K,V>[] t, int i, int n) {
3362 >            TableStack<K,V> s = spare;  // reuse if possible
3363 >            if (s != null)
3364 >                spare = s.next;
3365 >            else
3366 >                s = new TableStack<K,V>();
3367 >            s.tab = t;
3368 >            s.length = n;
3369 >            s.index = i;
3370 >            s.next = stack;
3371 >            stack = s;
3372 >        }
3373 >
3374 >        /**
3375 >         * Possibly pops traversal state.
3376 >         *
3377 >         * @param n length of current table
3378 >         */
3379 >        private void recoverState(int n) {
3380 >            TableStack<K,V> s; int len;
3381 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
3382 >                n = len;
3383 >                index = s.index;
3384 >                tab = s.tab;
3385 >                s.tab = null;
3386 >                TableStack<K,V> next = s.next;
3387 >                s.next = spare; // save for reuse
3388 >                stack = next;
3389 >                spare = s;
3390              }
3391 +            if (s == null && (index += baseSize) >= n)
3392 +                index = ++baseIndex;
3393          }
3394      }
3395  
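The saved-state mechanism above is what lets a traversal keep going while bins are being forwarded to a new table during a resize; from the caller's point of view the result is simply a weakly consistent iterator. A small usage sketch (the map contents and thread are hypothetical, not taken from the file):

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class WeaklyConsistentIterationDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 64; i++)
                map.put(i, "v" + i);

            // Mutate the map from another thread while iterating it here.
            Thread writer = new Thread(() -> {
                for (int i = 64; i < 10_000; i++)
                    map.put(i, "v" + i);   // may trigger resizes/transfers
            });
            writer.start();

            int seen = 0;
            for (Iterator<Map.Entry<Integer, String>> it =
                     map.entrySet().iterator(); it.hasNext(); ) {
                it.next();   // never throws ConcurrentModificationException
                seen++;
            }
            System.out.println("traversed " + seen + " entries");
        }
    }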
3396      /**
3397       * Base of key, value, and entry Iterators. Adds fields to
3398 <     * Traverser to support iterator.remove
3398 >     * Traverser to support iterator.remove.
3399       */
3400      static class BaseIterator<K,V> extends Traverser<K,V> {
3401          final ConcurrentHashMap<K,V> map;
# Line 3294 | Line 3479 | public class ConcurrentHashMap<K,V> impl
3479      }
3480  
3481      /**
3482 <     * Exported Entry for EntryIterator
3482 >     * Exported Entry for EntryIterator.
3483       */
3484      static final class MapEntry<K,V> implements Map.Entry<K,V> {
3485          final K key; // non-null
# Line 3308 | Line 3493 | public class ConcurrentHashMap<K,V> impl
3493          public K getKey()        { return key; }
3494          public V getValue()      { return val; }
3495          public int hashCode()    { return key.hashCode() ^ val.hashCode(); }
3496 <        public String toString() { return key + "=" + val; }
3496 >        public String toString() {
3497 >            return Helpers.mapEntryToString(key, val);
3498 >        }
3499  
3500          public boolean equals(Object o) {
3501              Object k, v; Map.Entry<?,?> e;
# Line 3498 | Line 3685 | public class ConcurrentHashMap<K,V> impl
3685       * for an element, or null if there is no transformation (in
3686       * which case the action is not applied)
3687       * @param action the action
3688 +     * @param <U> the return type of the transformer
3689       * @since 1.8
3690       */
3691      public <U> void forEach(long parallelismThreshold,
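A usage sketch of this bulk forEach, assuming a hypothetical ConcurrentHashMap<String,Long> named counts (the same pattern applies to the forEachKey, forEachValue and forEachEntry variants documented further below):

    // Print only entries whose count exceeds a threshold; returning null
    // from the transformer skips the action for that entry.
    counts.forEach(1L,                                      // parallelismThreshold
                   (k, v) -> v > 100L ? k + " -> " + v : null,
                   System.out::println);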
# Line 3521 | Line 3709 | public class ConcurrentHashMap<K,V> impl
3709       * needed for this operation to be executed in parallel
3710       * @param searchFunction a function returning a non-null
3711       * result on success, else null
3712 +     * @param <U> the return type of the search function
3713       * @return a non-null result from applying the given search
3714       * function on each (key, value), or null if none
3715       * @since 1.8
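A usage sketch of search, on the same hypothetical counts map as above; the search function returns non-null to stop the search and produce a result:

    // Find any key whose value is zero, or null if none; with a low
    // threshold the search may run in parallel and stops on a match.
    String zeroKey = counts.search(1L, (k, v) -> v == 0L ? k : null);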
# Line 3544 | Line 3733 | public class ConcurrentHashMap<K,V> impl
3733       * for an element, or null if there is no transformation (in
3734       * which case it is not combined)
3735       * @param reducer a commutative associative combining function
3736 +     * @param <U> the return type of the transformer
3737       * @return the result of accumulating the given transformation
3738       * of all (key, value) pairs
3739       * @since 1.8
# Line 3573 | Line 3763 | public class ConcurrentHashMap<K,V> impl
3763       * of all (key, value) pairs
3764       * @since 1.8
3765       */
3766 <    public double reduceToDoubleIn(long parallelismThreshold,
3767 <                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3768 <                                   double basis,
3769 <                                   DoubleBinaryOperator reducer) {
3766 >    public double reduceToDouble(long parallelismThreshold,
3767 >                                 ToDoubleBiFunction<? super K, ? super V> transformer,
3768 >                                 double basis,
3769 >                                 DoubleBinaryOperator reducer) {
3770          if (transformer == null || reducer == null)
3771              throw new NullPointerException();
3772          return new MapReduceMappingsToDoubleTask<K,V>
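The rename visible in this hunk (reduceToDoubleIn on the left becomes reduceToDouble on the right) aligns the method with the other primitive reductions. A usage sketch, again on a hypothetical counts map:

    // Sum all values as doubles; basis 0.0 is the identity for the reducer.
    double total = counts.reduceToDouble(1L,
                                         (k, v) -> v.doubleValue(),  // transformer
                                         0.0,                        // basis
                                         Double::sum);               // reducer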
# Line 3662 | Line 3852 | public class ConcurrentHashMap<K,V> impl
3852       * for an element, or null if there is no transformation (in
3853       * which case the action is not applied)
3854       * @param action the action
3855 +     * @param <U> the return type of the transformer
3856       * @since 1.8
3857       */
3858      public <U> void forEachKey(long parallelismThreshold,
# Line 3685 | Line 3876 | public class ConcurrentHashMap<K,V> impl
3876       * needed for this operation to be executed in parallel
3877       * @param searchFunction a function returning a non-null
3878       * result on success, else null
3879 +     * @param <U> the return type of the search function
3880       * @return a non-null result from applying the given search
3881       * function on each key, or null if none
3882       * @since 1.8
# Line 3727 | Line 3919 | public class ConcurrentHashMap<K,V> impl
3919       * for an element, or null if there is no transformation (in
3920       * which case it is not combined)
3921       * @param reducer a commutative associative combining function
3922 +     * @param <U> the return type of the transformer
3923       * @return the result of accumulating the given transformation
3924       * of all keys
3925       * @since 1.8
# Line 3846 | Line 4039 | public class ConcurrentHashMap<K,V> impl
4039       * for an element, or null if there is no transformation (in
4040       * which case the action is not applied)
4041       * @param action the action
4042 +     * @param <U> the return type of the transformer
4043       * @since 1.8
4044       */
4045      public <U> void forEachValue(long parallelismThreshold,
# Line 3869 | Line 4063 | public class ConcurrentHashMap<K,V> impl
4063       * needed for this operation to be executed in parallel
4064       * @param searchFunction a function returning a non-null
4065       * result on success, else null
4066 +     * @param <U> the return type of the search function
4067       * @return a non-null result from applying the given search
4068       * function on each value, or null if none
4069       * @since 1.8
# Line 3910 | Line 4105 | public class ConcurrentHashMap<K,V> impl
4105       * for an element, or null if there is no transformation (in
4106       * which case it is not combined)
4107       * @param reducer a commutative associative combining function
4108 +     * @param <U> the return type of the transformer
4109       * @return the result of accumulating the given transformation
4110       * of all values
4111       * @since 1.8
# Line 4027 | Line 4223 | public class ConcurrentHashMap<K,V> impl
4223       * for an element, or null if there is no transformation (in
4224       * which case the action is not applied)
4225       * @param action the action
4226 +     * @param <U> the return type of the transformer
4227       * @since 1.8
4228       */
4229      public <U> void forEachEntry(long parallelismThreshold,
# Line 4050 | Line 4247 | public class ConcurrentHashMap<K,V> impl
4247       * needed for this operation to be executed in parallel
4248       * @param searchFunction a function returning a non-null
4249       * result on success, else null
4250 +     * @param <U> the return type of the search function
4251       * @return a non-null result from applying the given search
4252       * function on each entry, or null if none
4253       * @since 1.8
# Line 4091 | Line 4289 | public class ConcurrentHashMap<K,V> impl
4289       * for an element, or null if there is no transformation (in
4290       * which case it is not combined)
4291       * @param reducer a commutative associative combining function
4292 +     * @param <U> the return type of the transformer
4293       * @return the result of accumulating the given transformation
4294       * of all entries
4295       * @since 1.8
# Line 4213 | Line 4412 | public class ConcurrentHashMap<K,V> impl
4412          // implementations below rely on concrete classes supplying these
4413          // abstract methods
4414          /**
4415 <         * Returns a "weakly consistent" iterator that will never
4416 <         * throw {@link ConcurrentModificationException}, and
4417 <         * guarantees to traverse elements as they existed upon
4418 <         * construction of the iterator, and may (but is not
4419 <         * guaranteed to) reflect any modifications subsequent to
4420 <         * construction.
4415 >         * Returns an iterator over the elements in this collection.
4416 >         *
4417 >         * <p>The returned iterator is
4418 >         * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
4419 >         *
4420 >         * @return an iterator over the elements in this collection
4421           */
4422          public abstract Iterator<E> iterator();
4423          public abstract boolean contains(Object o);
4424          public abstract boolean remove(Object o);
4425  
4426 <        private static final String oomeMsg = "Required array size too large";
4426 >        private static final String OOME_MSG = "Required array size too large";
4427  
4428          public final Object[] toArray() {
4429              long sz = map.mappingCount();
4430              if (sz > MAX_ARRAY_SIZE)
4431 <                throw new OutOfMemoryError(oomeMsg);
4431 >                throw new OutOfMemoryError(OOME_MSG);
4432              int n = (int)sz;
4433              Object[] r = new Object[n];
4434              int i = 0;
4435              for (E e : this) {
4436                  if (i == n) {
4437                      if (n >= MAX_ARRAY_SIZE)
4438 <                        throw new OutOfMemoryError(oomeMsg);
4438 >                        throw new OutOfMemoryError(OOME_MSG);
4439                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4440                          n = MAX_ARRAY_SIZE;
4441                      else
# Line 4252 | Line 4451 | public class ConcurrentHashMap<K,V> impl
4451          public final <T> T[] toArray(T[] a) {
4452              long sz = map.mappingCount();
4453              if (sz > MAX_ARRAY_SIZE)
4454 <                throw new OutOfMemoryError(oomeMsg);
4454 >                throw new OutOfMemoryError(OOME_MSG);
4455              int m = (int)sz;
4456              T[] r = (a.length >= m) ? a :
4457                  (T[])java.lang.reflect.Array
# Line 4262 | Line 4461 | public class ConcurrentHashMap<K,V> impl
4461              for (E e : this) {
4462                  if (i == n) {
4463                      if (n >= MAX_ARRAY_SIZE)
4464 <                        throw new OutOfMemoryError(oomeMsg);
4464 >                        throw new OutOfMemoryError(OOME_MSG);
4465                      if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4466                          n = MAX_ARRAY_SIZE;
4467                      else
# Line 4316 | Line 4515 | public class ConcurrentHashMap<K,V> impl
4515          }
4516  
4517          public final boolean removeAll(Collection<?> c) {
4518 +            if (c == null) throw new NullPointerException();
4519              boolean modified = false;
4520              for (Iterator<E> it = iterator(); it.hasNext();) {
4521                  if (c.contains(it.next())) {
# Line 4327 | Line 4527 | public class ConcurrentHashMap<K,V> impl
4527          }
4528  
4529          public final boolean retainAll(Collection<?> c) {
4530 +            if (c == null) throw new NullPointerException();
4531              boolean modified = false;
4532              for (Iterator<E> it = iterator(); it.hasNext();) {
4533                  if (!c.contains(it.next())) {
# Line 4507 | Line 4708 | public class ConcurrentHashMap<K,V> impl
4708              throw new UnsupportedOperationException();
4709          }
4710  
4711 +        public boolean removeIf(Predicate<? super V> filter) {
4712 +            return map.removeValueIf(filter);
4713 +        }
4714 +
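The new removeIf override lets the values view delegate bulk removal to the map itself (via removeValueIf, defined elsewhere in the file). A usage sketch, assuming the same hypothetical ConcurrentHashMap<String,Long> named counts:

    // Drop every mapping whose value is zero; removal is applied to the
    // backing map, not just to the view.
    counts.values().removeIf(v -> v == 0L);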
4715          public Spliterator<V> spliterator() {
4716              Node<K,V>[] t;
4717              ConcurrentHashMap<K,V> m = map;
# Line 4576 | Line 4781 | public class ConcurrentHashMap<K,V> impl
4781              return added;
4782          }
4783  
4784 +        public boolean removeIf(Predicate<? super Entry<K,V>> filter) {
4785 +            return map.removeEntryIf(filter);
4786 +        }
4787 +
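The corresponding entry-set override delegates to removeEntryIf; a sketch of filtering on both key and value (hypothetical key prefix):

    // Remove entries whose key is marked temporary and whose value is zero.
    counts.entrySet().removeIf(e -> e.getKey().startsWith("tmp.")
                                    && e.getValue() == 0L);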
4788          public final int hashCode() {
4789              int h = 0;
4790              Node<K,V>[] t;
# Line 4621 | Line 4830 | public class ConcurrentHashMap<K,V> impl
4830       * Base class for bulk tasks. Repeats some fields and code from
4831       * class Traverser, because we need to subclass CountedCompleter.
4832       */
4833 +    @SuppressWarnings("serial")
4834      abstract static class BulkTask<K,V,R> extends CountedCompleter<R> {
4835          Node<K,V>[] tab;        // same as Traverser
4836          Node<K,V> next;
4837 +        TableStack<K,V> stack, spare;
4838          int index;
4839          int baseIndex;
4840          int baseLimit;
# Line 4645 | Line 4856 | public class ConcurrentHashMap<K,V> impl
4856          }
4857  
4858          /**
4859 <         * Same as Traverser version
4859 >         * Same as Traverser version.
4860           */
4861          final Node<K,V> advance() {
4862              Node<K,V> e;
4863              if ((e = next) != null)
4864                  e = e.next;
4865              for (;;) {
4866 <                Node<K,V>[] t; int i, n; K ek;  // must use locals in checks
4866 >                Node<K,V>[] t; int i, n;
4867                  if (e != null)
4868                      return next = e;
4869                  if (baseIndex >= baseLimit || (t = tab) == null ||
4870                      (n = t.length) <= (i = index) || i < 0)
4871                      return next = null;
4872 <                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4872 >                if ((e = tabAt(t, i)) != null && e.hash < 0) {
4873                      if (e instanceof ForwardingNode) {
4874                          tab = ((ForwardingNode<K,V>)e).nextTable;
4875                          e = null;
4876 +                        pushState(t, i, n);
4877                          continue;
4878                      }
4879                      else if (e instanceof TreeBin)
# Line 4669 | Line 4881 | public class ConcurrentHashMap<K,V> impl
4881                      else
4882                          e = null;
4883                  }
4884 <                if ((index += baseSize) >= n)
4885 <                    index = ++baseIndex;    // visit upper slots if present
4884 >                if (stack != null)
4885 >                    recoverState(n);
4886 >                else if ((index = i + baseSize) >= n)
4887 >                    index = ++baseIndex;
4888 >            }
4889 >        }
4890 >
4891 >        private void pushState(Node<K,V>[] t, int i, int n) {
4892 >            TableStack<K,V> s = spare;
4893 >            if (s != null)
4894 >                spare = s.next;
4895 >            else
4896 >                s = new TableStack<K,V>();
4897 >            s.tab = t;
4898 >            s.length = n;
4899 >            s.index = i;
4900 >            s.next = stack;
4901 >            stack = s;
4902 >        }
4903 >
4904 >        private void recoverState(int n) {
4905 >            TableStack<K,V> s; int len;
4906 >            while ((s = stack) != null && (index += (len = s.length)) >= n) {
4907 >                n = len;
4908 >                index = s.index;
4909 >                tab = s.tab;
4910 >                s.tab = null;
4911 >                TableStack<K,V> next = s.next;
4912 >                s.next = spare; // save for reuse
4913 >                stack = next;
4914 >                spare = s;
4915              }
4916 +            if (s == null && (index += baseSize) >= n)
4917 +                index = ++baseIndex;
4918          }
4919      }
4920  
# Line 5131 | Line 5374 | public class ConcurrentHashMap<K,V> impl
5374                  result = r;
5375                  CountedCompleter<?> c;
5376                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5377 <                    @SuppressWarnings("unchecked") ReduceKeysTask<K,V>
5377 >                    @SuppressWarnings("unchecked")
5378 >                    ReduceKeysTask<K,V>
5379                          t = (ReduceKeysTask<K,V>)c,
5380                          s = t.rights;
5381                      while (s != null) {
# Line 5178 | Line 5422 | public class ConcurrentHashMap<K,V> impl
5422                  result = r;
5423                  CountedCompleter<?> c;
5424                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5425 <                    @SuppressWarnings("unchecked") ReduceValuesTask<K,V>
5425 >                    @SuppressWarnings("unchecked")
5426 >                    ReduceValuesTask<K,V>
5427                          t = (ReduceValuesTask<K,V>)c,
5428                          s = t.rights;
5429                      while (s != null) {
# Line 5223 | Line 5468 | public class ConcurrentHashMap<K,V> impl
5468                  result = r;
5469                  CountedCompleter<?> c;
5470                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5471 <                    @SuppressWarnings("unchecked") ReduceEntriesTask<K,V>
5471 >                    @SuppressWarnings("unchecked")
5472 >                    ReduceEntriesTask<K,V>
5473                          t = (ReduceEntriesTask<K,V>)c,
5474                          s = t.rights;
5475                      while (s != null) {
# Line 5276 | Line 5522 | public class ConcurrentHashMap<K,V> impl
5522                  result = r;
5523                  CountedCompleter<?> c;
5524                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5525 <                    @SuppressWarnings("unchecked") MapReduceKeysTask<K,V,U>
5525 >                    @SuppressWarnings("unchecked")
5526 >                    MapReduceKeysTask<K,V,U>
5527                          t = (MapReduceKeysTask<K,V,U>)c,
5528                          s = t.rights;
5529                      while (s != null) {
# Line 5329 | Line 5576 | public class ConcurrentHashMap<K,V> impl
5576                  result = r;
5577                  CountedCompleter<?> c;
5578                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5579 <                    @SuppressWarnings("unchecked") MapReduceValuesTask<K,V,U>
5579 >                    @SuppressWarnings("unchecked")
5580 >                    MapReduceValuesTask<K,V,U>
5581                          t = (MapReduceValuesTask<K,V,U>)c,
5582                          s = t.rights;
5583                      while (s != null) {
# Line 5382 | Line 5630 | public class ConcurrentHashMap<K,V> impl
5630                  result = r;
5631                  CountedCompleter<?> c;
5632                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5633 <                    @SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
5633 >                    @SuppressWarnings("unchecked")
5634 >                    MapReduceEntriesTask<K,V,U>
5635                          t = (MapReduceEntriesTask<K,V,U>)c,
5636                          s = t.rights;
5637                      while (s != null) {
# Line 5435 | Line 5684 | public class ConcurrentHashMap<K,V> impl
5684                  result = r;
5685                  CountedCompleter<?> c;
5686                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5687 <                    @SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
5687 >                    @SuppressWarnings("unchecked")
5688 >                    MapReduceMappingsTask<K,V,U>
5689                          t = (MapReduceMappingsTask<K,V,U>)c,
5690                          s = t.rights;
5691                      while (s != null) {
# Line 5487 | Line 5737 | public class ConcurrentHashMap<K,V> impl
5737                  result = r;
5738                  CountedCompleter<?> c;
5739                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5740 <                    @SuppressWarnings("unchecked") MapReduceKeysToDoubleTask<K,V>
5740 >                    @SuppressWarnings("unchecked")
5741 >                    MapReduceKeysToDoubleTask<K,V>
5742                          t = (MapReduceKeysToDoubleTask<K,V>)c,
5743                          s = t.rights;
5744                      while (s != null) {
# Line 5536 | Line 5787 | public class ConcurrentHashMap<K,V> impl
5787                  result = r;
5788                  CountedCompleter<?> c;
5789                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5790 <                    @SuppressWarnings("unchecked") MapReduceValuesToDoubleTask<K,V>
5790 >                    @SuppressWarnings("unchecked")
5791 >                    MapReduceValuesToDoubleTask<K,V>
5792                          t = (MapReduceValuesToDoubleTask<K,V>)c,
5793                          s = t.rights;
5794                      while (s != null) {
# Line 5585 | Line 5837 | public class ConcurrentHashMap<K,V> impl
5837                  result = r;
5838                  CountedCompleter<?> c;
5839                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5840 <                    @SuppressWarnings("unchecked") MapReduceEntriesToDoubleTask<K,V>
5840 >                    @SuppressWarnings("unchecked")
5841 >                    MapReduceEntriesToDoubleTask<K,V>
5842                          t = (MapReduceEntriesToDoubleTask<K,V>)c,
5843                          s = t.rights;
5844                      while (s != null) {
# Line 5634 | Line 5887 | public class ConcurrentHashMap<K,V> impl
5887                  result = r;
5888                  CountedCompleter<?> c;
5889                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5890 <                    @SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
5890 >                    @SuppressWarnings("unchecked")
5891 >                    MapReduceMappingsToDoubleTask<K,V>
5892                          t = (MapReduceMappingsToDoubleTask<K,V>)c,
5893                          s = t.rights;
5894                      while (s != null) {
# Line 5683 | Line 5937 | public class ConcurrentHashMap<K,V> impl
5937                  result = r;
5938                  CountedCompleter<?> c;
5939                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5940 <                    @SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
5940 >                    @SuppressWarnings("unchecked")
5941 >                    MapReduceKeysToLongTask<K,V>
5942                          t = (MapReduceKeysToLongTask<K,V>)c,
5943                          s = t.rights;
5944                      while (s != null) {
# Line 5732 | Line 5987 | public class ConcurrentHashMap<K,V> impl
5987                  result = r;
5988                  CountedCompleter<?> c;
5989                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
5990 <                    @SuppressWarnings("unchecked") MapReduceValuesToLongTask<K,V>
5990 >                    @SuppressWarnings("unchecked")
5991 >                    MapReduceValuesToLongTask<K,V>
5992                          t = (MapReduceValuesToLongTask<K,V>)c,
5993                          s = t.rights;
5994                      while (s != null) {
# Line 5781 | Line 6037 | public class ConcurrentHashMap<K,V> impl
6037                  result = r;
6038                  CountedCompleter<?> c;
6039                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6040 <                    @SuppressWarnings("unchecked") MapReduceEntriesToLongTask<K,V>
6040 >                    @SuppressWarnings("unchecked")
6041 >                    MapReduceEntriesToLongTask<K,V>
6042                          t = (MapReduceEntriesToLongTask<K,V>)c,
6043                          s = t.rights;
6044                      while (s != null) {
# Line 5830 | Line 6087 | public class ConcurrentHashMap<K,V> impl
6087                  result = r;
6088                  CountedCompleter<?> c;
6089                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6090 <                    @SuppressWarnings("unchecked") MapReduceMappingsToLongTask<K,V>
6090 >                    @SuppressWarnings("unchecked")
6091 >                    MapReduceMappingsToLongTask<K,V>
6092                          t = (MapReduceMappingsToLongTask<K,V>)c,
6093                          s = t.rights;
6094                      while (s != null) {
# Line 5879 | Line 6137 | public class ConcurrentHashMap<K,V> impl
6137                  result = r;
6138                  CountedCompleter<?> c;
6139                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6140 <                    @SuppressWarnings("unchecked") MapReduceKeysToIntTask<K,V>
6140 >                    @SuppressWarnings("unchecked")
6141 >                    MapReduceKeysToIntTask<K,V>
6142                          t = (MapReduceKeysToIntTask<K,V>)c,
6143                          s = t.rights;
6144                      while (s != null) {
# Line 5928 | Line 6187 | public class ConcurrentHashMap<K,V> impl
6187                  result = r;
6188                  CountedCompleter<?> c;
6189                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6190 <                    @SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
6190 >                    @SuppressWarnings("unchecked")
6191 >                    MapReduceValuesToIntTask<K,V>
6192                          t = (MapReduceValuesToIntTask<K,V>)c,
6193                          s = t.rights;
6194                      while (s != null) {
# Line 5977 | Line 6237 | public class ConcurrentHashMap<K,V> impl
6237                  result = r;
6238                  CountedCompleter<?> c;
6239                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6240 <                    @SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
6240 >                    @SuppressWarnings("unchecked")
6241 >                    MapReduceEntriesToIntTask<K,V>
6242                          t = (MapReduceEntriesToIntTask<K,V>)c,
6243                          s = t.rights;
6244                      while (s != null) {
# Line 6026 | Line 6287 | public class ConcurrentHashMap<K,V> impl
6287                  result = r;
6288                  CountedCompleter<?> c;
6289                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
6290 <                    @SuppressWarnings("unchecked") MapReduceMappingsToIntTask<K,V>
6290 >                    @SuppressWarnings("unchecked")
6291 >                    MapReduceMappingsToIntTask<K,V>
6292                          t = (MapReduceMappingsToIntTask<K,V>)c,
6293                          s = t.rights;
6294                      while (s != null) {
# Line 6039 | Line 6301 | public class ConcurrentHashMap<K,V> impl
6301      }
6302  
6303      // Unsafe mechanics
6304 <    private static final sun.misc.Unsafe U;
6304 >    private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe();
6305      private static final long SIZECTL;
6306      private static final long TRANSFERINDEX;
6045    private static final long TRANSFERORIGIN;
6307      private static final long BASECOUNT;
6308      private static final long CELLSBUSY;
6309      private static final long CELLVALUE;
6310 <    private static final long ABASE;
6310 >    private static final int ABASE;
6311      private static final int ASHIFT;
6312  
6313      static {
6314          try {
6054            U = sun.misc.Unsafe.getUnsafe();
6055            Class<?> k = ConcurrentHashMap.class;
6315              SIZECTL = U.objectFieldOffset
6316 <                (k.getDeclaredField("sizeCtl"));
6316 >                (ConcurrentHashMap.class.getDeclaredField("sizeCtl"));
6317              TRANSFERINDEX = U.objectFieldOffset
6318 <                (k.getDeclaredField("transferIndex"));
6060 <            TRANSFERORIGIN = U.objectFieldOffset
6061 <                (k.getDeclaredField("transferOrigin"));
6318 >                (ConcurrentHashMap.class.getDeclaredField("transferIndex"));
6319              BASECOUNT = U.objectFieldOffset
6320 <                (k.getDeclaredField("baseCount"));
6320 >                (ConcurrentHashMap.class.getDeclaredField("baseCount"));
6321              CELLSBUSY = U.objectFieldOffset
6322 <                (k.getDeclaredField("cellsBusy"));
6323 <            Class<?> ck = CounterCell.class;
6322 >                (ConcurrentHashMap.class.getDeclaredField("cellsBusy"));
6323 >
6324              CELLVALUE = U.objectFieldOffset
6325 <                (ck.getDeclaredField("value"));
6326 <            Class<?> ak = Node[].class;
6327 <            ABASE = U.arrayBaseOffset(ak);
6328 <            int scale = U.arrayIndexScale(ak);
6325 >                (CounterCell.class.getDeclaredField("value"));
6326 >
6327 >            ABASE = U.arrayBaseOffset(Node[].class);
6328 >            int scale = U.arrayIndexScale(Node[].class);
6329              if ((scale & (scale - 1)) != 0)
6330 <                throw new Error("data type scale not a power of two");
6330 >                throw new Error("array index scale not a power of two");
6331              ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
6332 <        } catch (Exception e) {
6332 >        } catch (ReflectiveOperationException e) {
6333              throw new Error(e);
6334          }
6335 +
6336 +        // Reduce the risk of rare disastrous classloading in first call to
6337 +        // LockSupport.park: https://bugs.openjdk.java.net/browse/JDK-8074773
6338 +        Class<?> ensureLoaded = LockSupport.class;
6339      }
6340   }
