root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java

Comparing jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java (file contents):
Revision 1.209 by dl, Tue May 7 20:25:36 2013 UTC vs.
Revision 1.210 by dl, Tue May 21 19:10:43 2013 UTC

# Line 9 | Line 9 | import java.io.Serializable;
9   import java.io.ObjectStreamField;
10   import java.lang.reflect.ParameterizedType;
11   import java.lang.reflect.Type;
12 import java.util.AbstractCollection;
13 import java.util.AbstractMap;
14 import java.util.AbstractSet;
12   import java.util.Arrays;
13   import java.util.Collection;
14   import java.util.Comparator;
# Line 27 | Line 24 | import java.util.Spliterator;
24   import java.util.concurrent.ConcurrentMap;
25   import java.util.concurrent.ForkJoinPool;
26   import java.util.concurrent.atomic.AtomicReference;
30 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
27   import java.util.concurrent.locks.ReentrantLock;
28 + import java.util.concurrent.locks.StampedLock;
29   import java.util.function.BiConsumer;
30   import java.util.function.BiFunction;
31   import java.util.function.BinaryOperator;
# Line 43 | Line 40 | import java.util.function.ToIntBiFunctio
40   import java.util.function.ToIntFunction;
41   import java.util.function.ToLongBiFunction;
42   import java.util.function.ToLongFunction;
43 + import java.util.stream.Stream;
44  
45   /**
46   * A hash table supporting full concurrency of retrievals and
# Line 96 | Line 94 | import java.util.function.ToLongFunction
94   * expected {@code concurrencyLevel} as an additional hint for
95   * internal sizing.  Note that using many keys with exactly the same
96   * {@code hashCode()} is a sure way to slow down performance of any
97 < * hash table.
97 > * hash table. To ameliorate impact, when keys are {@link Comparable},
98 > * this class may use comparison order among keys to help break ties.
99   *
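[Editor's illustration] A small sketch of the tie-breaking hint added above: a hypothetical key type (CollidingKey and the demo class are inventions for this example, not part of this file) whose hashCode deliberately collides for every instance, but which implements Comparable so the map can fall back on comparison order when organizing colliding keys.

    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical key: every instance collides on hashCode(), but because it
    // is Comparable the map can use comparison order to help break ties.
    final class CollidingKey implements Comparable<CollidingKey> {
        final int id;
        CollidingKey(int id) { this.id = id; }
        @Override public int hashCode() { return 42; }            // worst case: all keys collide
        @Override public boolean equals(Object o) {
            return (o instanceof CollidingKey) && ((CollidingKey) o).id == id;
        }
        @Override public int compareTo(CollidingKey other) {
            return Integer.compare(id, other.id);                  // tie-breaking order
        }
    }

    class TieBreakDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<CollidingKey, String> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 1000; i++)
                map.put(new CollidingKey(i), "v" + i);
            System.out.println(map.get(new CollidingKey(500)));    // prints "v500"
        }
    }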
100   * <p>A {@link Set} projection of a ConcurrentHashMap may be created
101   * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
# Line 118 | Line 117 | import java.util.function.ToLongFunction
117   * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
118   * does <em>not</em> allow {@code null} to be used as a key or value.
119   *
120 < * <p>ConcurrentHashMaps support sequential and parallel operations
121 < * bulk operations. (Parallel forms use the {@link
122 < * ForkJoinPool#commonPool()}). Tasks that may be used in other
123 < * contexts are available in class {@link ForkJoinTasks}. These
124 < * operations are designed to be safely, and often sensibly, applied
125 < * even with maps that are being concurrently updated by other
126 < * threads; for example, when computing a snapshot summary of the
127 < * values in a shared registry.  There are three kinds of operation,
128 < * each with four forms, accepting functions with Keys, Values,
129 < * Entries, and (Key, Value) arguments and/or return values. Because
130 < * the elements of a ConcurrentHashMap are not ordered in any
131 < * particular way, and may be processed in different orders in
132 < * different parallel executions, the correctness of supplied
133 < * functions should not depend on any ordering, or on any other
134 < * objects or values that may transiently change while computation is
136 < * in progress; and except for forEach actions, should ideally be
137 < * side-effect-free.
120 > * <p>ConcurrentHashMaps support a set of sequential and parallel bulk
121 > * operations that, unlike most {@link Stream} methods, are designed
122 > * to be safely, and often sensibly, applied even with maps that are
123 > * being concurrently updated by other threads; for example, when
124 > * computing a snapshot summary of the values in a shared registry.
125 > * There are three kinds of operation, each with four forms, accepting
126 > * functions with Keys, Values, Entries, and (Key, Value) arguments
127 > * and/or return values. Because the elements of a ConcurrentHashMap
128 > * are not ordered in any particular way, and may be processed in
129 > * different orders in different parallel executions, the correctness
130 > * of supplied functions should not depend on any ordering, or on any
131 > * other objects or values that may transiently change while
132 > * computation is in progress; and except for forEach actions, should
133 > * ideally be side-effect-free. Bulk operations on {@link Map.Entry}
134 > * objects do not support method {@code setValue}.
135   *
136   * <ul>
137   * <li> forEach: Perform a given action on each element.
# Line 165 | Line 162 | import java.util.function.ToLongFunction
162   * </li>
163   * </ul>
164   *
165 + * <p>These bulk operations accept a {@code parallelismThreshold}
166 + * argument. Methods proceed sequentially if the current map size is
167 + * estimated to be less than the given threshold. Using a value of
168 + * {@code Long.MAX_VALUE} suppresses all parallelism.  Using a value
169 + * of {@code 1} results in maximal parallelism.  In-between values can
170 + * be used to trade off overhead versus throughput. Parallel forms use
171 + * the {@link ForkJoinPool#commonPool()}.
172 + *
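[Editor's illustration] A minimal usage sketch of the bulk operations and parallelismThreshold argument described above, written against the forEach and reduceValues signatures as they appear in the released JDK 8 API (threshold values are arbitrary; the demo class is not part of this file).

    import java.util.concurrent.ConcurrentHashMap;

    public class BulkOpsDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Long> hits = new ConcurrentHashMap<>();
            hits.put("a", 1L);
            hits.put("b", 2L);
            hits.put("c", 3L);

            // Proceeds sequentially while the estimated size is below the threshold;
            // Long.MAX_VALUE therefore suppresses parallelism entirely.
            hits.forEach(Long.MAX_VALUE, (k, v) -> System.out.println(k + "=" + v));

            // Parallel reduction over values runs in the common pool;
            // a threshold of 1 requests maximal parallelism.
            Long total = hits.reduceValues(1L, Long::sum);
            System.out.println("total=" + total);
        }
    }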
173   * <p>The concurrency properties of bulk operations follow
174   * from those of ConcurrentHashMap: Any non-null result returned
175   * from {@code get(key)} and related access methods bears a
# Line 226 | Line 231 | import java.util.function.ToLongFunction
231   * @param <K> the type of keys maintained by this map
232   * @param <V> the type of mapped values
233   */
234 < public class ConcurrentHashMap<K,V>
235 <    implements ConcurrentMap<K,V>, Serializable {
234 > @SuppressWarnings({"unchecked", "rawtypes", "serial"})
235 > public class ConcurrentHashMap<K,V> implements ConcurrentMap<K,V>, Serializable {
236      private static final long serialVersionUID = 7249069246763182397L;
237  
238      /*
# Line 243 | Line 248 | public class ConcurrentHashMap<K,V>
248       * Each key-value mapping is held in a Node.  Because Node key
249       * fields can contain special values, they are defined using plain
250       * Object types (not type "K"). This leads to a lot of explicit
251 <     * casting (and many explicit warning suppressions to tell
252 <     * compilers not to complain about it). It also allows some of the
253 <     * public methods to be factored into a smaller number of internal
254 <     * methods (although sadly not so for the five variants of
255 <     * put-related operations). The validation-based approach
256 <     * explained below leads to a lot of code sprawl because
257 <     * retry-control precludes factoring into smaller methods.
251 >     * casting (and the use of class-wide warning suppressions).  It
252 >     * also allows some of the public methods to be factored into a
253 >     * smaller number of internal methods (although sadly not so for
254 >     * the five variants of put-related operations). The
255 >     * validation-based approach explained below leads to a lot of
256 >     * code sprawl because retry-control precludes factoring into
257 >     * smaller methods.
258       *
259       * The table is lazily initialized to a power-of-two size upon the
260       * first insertion.  Each bin in the table normally contains a
# Line 257 | Line 262 | public class ConcurrentHashMap<K,V>
262       * Table accesses require volatile/atomic reads, writes, and
263       * CASes.  Because there is no other way to arrange this without
264       * adding further indirections, we use intrinsics
265 <     * (sun.misc.Unsafe) operations.  The lists of nodes within bins
261 <     * are always accurately traversable under volatile reads, so long
262 <     * as lookups check hash code and non-nullness of value before
263 <     * checking key equality.
265 >     * (sun.misc.Unsafe) operations.
266       *
267       * We use the top (sign) bit of Node hash fields for control
268       * purposes -- it is available anyway because of addressing
# Line 284 | Line 286 | public class ConcurrentHashMap<K,V>
286       * validate that it is still the first node after locking it, and
287       * retry if not. Because new nodes are always appended to lists,
288       * once a node is first in a bin, it remains first until deleted
289 <     * or the bin becomes invalidated (upon resizing).  However,
288 <     * operations that only conditionally update may inspect nodes
289 <     * until the point of update. This is a converse of sorts to the
290 <     * lazy locking technique described by Herlihy & Shavit.
289 >     * or the bin becomes invalidated (upon resizing).
290       *
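[Editor's illustration] The lock-then-validate idiom described in this paragraph can be distilled into the following standalone sketch (the class, names, and plain array access are hypothetical; the real code reads and writes bins with volatile tabAt/casTabAt, as in internalPut below).

    // Hypothetical, simplified illustration: lock the current bin head,
    // re-validate that it is still the head, and retry the whole bin if a
    // resize or head replacement invalidated it.
    final class BinLockSketch<K, V> {
        static final class Node<K, V> {
            final K key;
            volatile V val;
            Node<K, V> next;
            Node(K key, V val, Node<K, V> next) { this.key = key; this.val = val; this.next = next; }
        }

        @SuppressWarnings("unchecked")
        private final Node<K, V>[] table = (Node<K, V>[]) new Node<?, ?>[16];

        /** Replaces the value for key in bin i if present; null means "not found". */
        V replaceIfPresent(int i, K key, V val) {
            for (;;) {
                Node<K, V> f = table[i];                 // current bin head
                if (f == null)
                    return null;                         // empty bin: real code would CAS a new head
                synchronized (f) {
                    if (table[i] == f) {                 // validate: still the head after locking
                        for (Node<K, V> e = f; e != null; e = e.next) {
                            if (e.key.equals(key)) {
                                V old = e.val;
                                e.val = val;
                                return old;
                            }
                        }
                        return null;                     // key not present in this bin
                    }
                }                                        // head changed while locking: retry
            }
        }
    }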
291       * The main disadvantage of per-bin locks is that other update
292       * operations on other nodes in a bin list protected by the same
# Line 458 | Line 457 | public class ConcurrentHashMap<K,V>
457       * bin.  The value reflects the approximate break-even point for
458       * using tree-based operations.
459       */
460 <    private static final int TREE_THRESHOLD = 16;
460 >    private static final int TREE_THRESHOLD = 8;
461  
462      /**
463       * Minimum number of rebinnings per transfer step. Ranges are
# Line 485 | Line 484 | public class ConcurrentHashMap<K,V>
484          new ObjectStreamField("segmentShift", Integer.TYPE)
485      };
486  
487 <    /* ---------------- Counters -------------- */
488 <
489 <    // Adapted from LongAdder and Striped64.
490 <    // See their internal docs for explanation.
491 <
493 <    // A padded cell for distributing counts
494 <    static final class Cell {
495 <        volatile long p0, p1, p2, p3, p4, p5, p6;
487 >    /**
488 >     * A padded cell for distributing counts.  Adapted from LongAdder
489 >     * and Striped64.  See their internal docs for explanation.
490 >     */
491 >    @sun.misc.Contended static final class Cell {
492          volatile long value;
497        volatile long q0, q1, q2, q3, q4, q5, q6;
493          Cell(long x) { value = x; }
494      }
495  
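[Editor's illustration] For context, a hedged standalone sketch of the LongAdder/Striped64 striping idea that Cell supports: a base counter plus contended cells that are summed on demand. All names here are illustrative and are not this file's fields.

    import java.util.concurrent.ThreadLocalRandom;
    import java.util.concurrent.atomic.AtomicLong;

    // Standalone sketch (not this file's code): updates try a shared base
    // counter first and fall back to a striped cell under contention; reads
    // sum the base plus all cells, yielding an approximate live total.
    final class StripedCounterSketch {
        private final AtomicLong base = new AtomicLong();
        private final AtomicLong[] cells = new AtomicLong[8];  // real code pads and sizes lazily

        StripedCounterSketch() {
            for (int i = 0; i < cells.length; i++) cells[i] = new AtomicLong();
        }

        void add(long x) {
            long b = base.get();
            if (!base.compareAndSet(b, b + x))                 // contended: use a striped cell
                cells[ThreadLocalRandom.current().nextInt(cells.length)].addAndGet(x);
        }

        long sum() {                                           // approximate under concurrent updates
            long s = base.get();
            for (AtomicLong c : cells) s += c.get();
            return s;
        }
    }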
# Line 504 | Line 499 | public class ConcurrentHashMap<K,V>
499       * The array of bins. Lazily initialized upon first insertion.
500       * Size is always a power of two. Accessed directly by iterators.
501       */
502 <    transient volatile Node<V>[] table;
502 >    transient volatile Node<K,V>[] table;
503  
504      /**
505       * The next table to use; non-null only while resizing.
506       */
507 <    private transient volatile Node<V>[] nextTable;
507 >    private transient volatile Node<K,V>[] nextTable;
508  
509      /**
510       * Base counter value, used mainly when there is no contention,
# Line 567 | Line 562 | public class ConcurrentHashMap<K,V>
562       * inline assignments below.
563       */
564  
565 <    @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
566 <        (Node<V>[] tab, int i) { // used by Traverser
572 <        return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
565 >    static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
566 >        return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
567      }
568  
569 <    private static final <V> boolean casTabAt
570 <        (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
569 >    static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
570 >                                        Node<K,V> c, Node<K,V> v) {
571          return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
572      }
573  
574 <    private static final <V> void setTabAt
581 <        (Node<V>[] tab, int i, Node<V> v) {
574 >    static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
575          U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
576      }
577  
578      /* ---------------- Nodes -------------- */
579  
580      /**
581 <     * Key-value entry. Note that this is never exported out as a
582 <     * user-visible Map.Entry (see MapEntry below). Nodes with a hash
583 <     * field of MOVED are special, and do not contain user keys or
584 <     * values.  Otherwise, keys are never null, and null val fields
585 <     * indicate that a node is in the process of being deleted or
586 <     * created. For purposes of read-only access, a key may be read
594 <     * before a val, but can only be used after checking val to be
595 <     * non-null.
581 >     * Key-value entry.  This class is never exported out as a
582 >     * user-mutable Map.Entry (i.e., one supporting setValue; see
583 >     * MapEntry below), but can be used for read-only traversals used
584 >     * in curom bulk tasks.  Nodes with a hash field of MOVED are
585 >     * special, and do not contain user keys or values (and are never
586 >     * exported).  Otherwise, keys and vals are never null.
587       */
588 <    static class Node<V> {
588 >    static class Node<K,V> implements Map.Entry<K,V> {
589          final int hash;
590          final Object key;
591          volatile V val;
592 <        volatile Node<V> next;
592 >        Node<K,V> next;
593  
594 <        Node(int hash, Object key, V val, Node<V> next) {
594 >        Node(int hash, Object key, V val, Node<K,V> next) {
595              this.hash = hash;
596              this.key = key;
597              this.val = val;
598              this.next = next;
599          }
600 +
601 +        public final K getKey()       { return (K)key; }
602 +        public final V getValue()     { return val; }
603 +        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
604 +        public final String toString(){ return key + "=" + val; }
605 +        public final V setValue(V value) {
606 +            throw new UnsupportedOperationException();
607 +        }
608 +
609 +        public final boolean equals(Object o) {
610 +            Object k, v, u; Map.Entry<?,?> e;
611 +            return ((o instanceof Map.Entry) &&
612 +                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
613 +                    (v = e.getValue()) != null &&
614 +                    (k == key || k.equals(key)) &&
615 +                    (v == (u = val) || v.equals(u)));
616 +        }
617 +    }
618 +
619 +    /**
620 +     * Exported Entry for EntryIterator
621 +     */
622 +    static final class MapEntry<K,V> implements Map.Entry<K,V> {
623 +        final K key; // non-null
624 +        V val;       // non-null
625 +        final ConcurrentHashMap<K,V> map;
626 +        MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
627 +            this.key = key;
628 +            this.val = val;
629 +            this.map = map;
630 +        }
631 +        public K getKey()        { return key; }
632 +        public V getValue()      { return val; }
633 +        public int hashCode()    { return key.hashCode() ^ val.hashCode(); }
634 +        public String toString() { return key + "=" + val; }
635 +
636 +        public boolean equals(Object o) {
637 +            Object k, v; Map.Entry<?,?> e;
638 +            return ((o instanceof Map.Entry) &&
639 +                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
640 +                    (v = e.getValue()) != null &&
641 +                    (k == key || k.equals(key)) &&
642 +                    (v == val || v.equals(val)));
643 +        }
644 +
645 +        /**
646 +         * Sets our entry's value and writes through to the map. The
647 +         * value to return is somewhat arbitrary here. Since we do not
648 +         * necessarily track asynchronous changes, the most recent
649 +         * "previous" value could be different from what we return (or
650 +         * could even have been removed in which case the put will
651 +         * re-establish). We do not and cannot guarantee more.
652 +         */
653 +        public V setValue(V value) {
654 +            if (value == null) throw new NullPointerException();
655 +            V v = val;
656 +            val = value;
657 +            map.put(key, value);
658 +            return v;
659 +        }
660      }
661  
662 +
663      /* ---------------- TreeBins -------------- */
664  
665      /**
666       * Nodes for use in TreeBins
667       */
668 <    static final class TreeNode<V> extends Node<V> {
669 <        TreeNode<V> parent;  // red-black tree links
670 <        TreeNode<V> left;
671 <        TreeNode<V> right;
672 <        TreeNode<V> prev;    // needed to unlink next upon deletion
668 >    static final class TreeNode<K,V> extends Node<K,V> {
669 >        TreeNode<K,V> parent;  // red-black tree links
670 >        TreeNode<K,V> left;
671 >        TreeNode<K,V> right;
672 >        TreeNode<K,V> prev;    // needed to unlink next upon deletion
673          boolean red;
674  
675 <        TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
675 >        TreeNode(int hash, Object key, V val, Node<K,V> next,
676 >                 TreeNode<K,V> parent) {
677              super(hash, key, val, next);
678              this.parent = parent;
679          }
# Line 659 | Line 712 | public class ConcurrentHashMap<K,V>
712       * related operations (which is the main reason we cannot use
713       * existing collections such as TreeMaps). TreeBins contain
714       * Comparable elements, but may contain others, as well as
715 <     * elements that are Comparable but not necessarily Comparable<T>
715 >     * elements that are Comparable but not necessarily Comparable
716       * for the same T, so we cannot invoke compareTo among them. To
717       * handle this, the tree is ordered primarily by hash value, then
718       * by Comparable.compareTo order if applicable.  On lookup at a
# Line 676 | Line 729 | public class ConcurrentHashMap<K,V>
729       * TreeBins also maintain a separate locking discipline than
730       * regular bins. Because they are forwarded via special MOVED
731       * nodes at bin heads (which can never change once established),
732 <     * we cannot use those nodes as locks. Instead, TreeBin
733 <     * extends AbstractQueuedSynchronizer to support a simple form of
734 <     * read-write lock. For update operations and table validation,
735 <     * the exclusive form of lock behaves in the same way as bin-head
736 <     * locks. However, lookups use shared read-lock mechanics to allow
737 <     * multiple readers in the absence of writers.  Additionally,
738 <     * these lookups do not ever block: While the lock is not
739 <     * available, they proceed along the slow traversal path (via
740 <     * next-pointers) until the lock becomes available or the list is
741 <     * exhausted, whichever comes first. (These cases are not fast,
742 <     * but maximize aggregate expected throughput.)  The AQS mechanics
690 <     * for doing this are straightforward.  The lock state is held as
691 <     * AQS getState().  Read counts are negative; the write count (1)
692 <     * is positive.  There are no signalling preferences among readers
693 <     * and writers. Since we don't need to export full Lock API, we
694 <     * just override the minimal AQS methods and use them directly.
732 >     * we cannot use those nodes as locks. Instead, TreeBin extends
733 >     * StampedLock to support a form of read-write lock. For update
734 >     * operations and table validation, the exclusive form of lock
735 >     * behaves in the same way as bin-head locks. However, lookups use
736 >     * shared read-lock mechanics to allow multiple readers in the
737 >     * absence of writers.  Additionally, these lookups do not ever
738 >     * block: While the lock is not available, they proceed along the
739 >     * slow traversal path (via next-pointers) until the lock becomes
740 >     * available or the list is exhausted, whichever comes
741 >     * first. These cases are not fast, but maximize aggregate
742 >     * expected throughput.
743       */
744 <    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
744 >    static final class TreeBin<K,V> extends StampedLock {
745          private static final long serialVersionUID = 2249069246763182397L;
746 <        transient TreeNode<V> root;  // root of tree
747 <        transient TreeNode<V> first; // head of next-pointer list
700 <
701 <        /* AQS overrides */
702 <        public final boolean isHeldExclusively() { return getState() > 0; }
703 <        public final boolean tryAcquire(int ignore) {
704 <            if (compareAndSetState(0, 1)) {
705 <                setExclusiveOwnerThread(Thread.currentThread());
706 <                return true;
707 <            }
708 <            return false;
709 <        }
710 <        public final boolean tryRelease(int ignore) {
711 <            setExclusiveOwnerThread(null);
712 <            setState(0);
713 <            return true;
714 <        }
715 <        public final int tryAcquireShared(int ignore) {
716 <            for (int c;;) {
717 <                if ((c = getState()) > 0)
718 <                    return -1;
719 <                if (compareAndSetState(c, c -1))
720 <                    return 1;
721 <            }
722 <        }
723 <        public final boolean tryReleaseShared(int ignore) {
724 <            int c;
725 <            do {} while (!compareAndSetState(c = getState(), c + 1));
726 <            return c == -1;
727 <        }
746 >        transient TreeNode<K,V> root;  // root of tree
747 >        transient TreeNode<K,V> first; // head of next-pointer list
748  
749          /** From CLR */
750 <        private void rotateLeft(TreeNode<V> p) {
750 >        private void rotateLeft(TreeNode<K,V> p) {
751              if (p != null) {
752 <                TreeNode<V> r = p.right, pp, rl;
752 >                TreeNode<K,V> r = p.right, pp, rl;
753                  if ((rl = p.right = r.left) != null)
754                      rl.parent = p;
755                  if ((pp = r.parent = p.parent) == null)
# Line 744 | Line 764 | public class ConcurrentHashMap<K,V>
764          }
765  
766          /** From CLR */
767 <        private void rotateRight(TreeNode<V> p) {
767 >        private void rotateRight(TreeNode<K,V> p) {
768              if (p != null) {
769 <                TreeNode<V> l = p.left, pp, lr;
769 >                TreeNode<K,V> l = p.left, pp, lr;
770                  if ((lr = p.left = l.right) != null)
771                      lr.parent = p;
772                  if ((pp = l.parent = p.parent) == null)
# Line 761 | Line 781 | public class ConcurrentHashMap<K,V>
781          }
782  
783          /**
764         * Returns the TreeNode (or null if not found) for the given
765         * key.  A front-end for recursive version.
766         */
767        final TreeNode<V> getTreeNode(int h, Object k) {
768            return getTreeNode(h, k, root, comparableClassFor(k));
769        }
770
771        /**
784           * Returns the TreeNode (or null if not found) for the given key
785           * starting at given root.
786           */
787 <        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
788 <            (int h, Object k, TreeNode<V> p, Class<?> cc) {
787 >        final TreeNode<K,V> getTreeNode(int h, Object k, TreeNode<K,V> p,
788 >                                        Class<?> cc) {
789              while (p != null) {
790                  int dir, ph; Object pk;
791                  if ((ph = p.hash) != h)
# Line 782 | Line 794 | public class ConcurrentHashMap<K,V>
794                      return p;
795                  else if (cc == null || comparableClassFor(pk) != cc ||
796                           (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
797 <                    TreeNode<V> r, pr; // check both sides
797 >                    TreeNode<K,V> r, pr; // check both sides
798                      if ((pr = p.right) != null && h >= pr.hash &&
799                          (r = getTreeNode(h, k, pr, cc)) != null)
800                          return r;
# Line 800 | Line 812 | public class ConcurrentHashMap<K,V>
812           * lock, searches along next links.
813           */
814          final V getValue(int h, Object k) {
815 <            Node<V> r = null;
816 <            int c = getState(); // Must read lock state first
817 <            for (Node<V> e = first; e != null; e = e.next) {
818 <                if (c <= 0 && compareAndSetState(c, c - 1)) {
815 >            Class<?> cc = comparableClassFor(k);
816 >            Node<K,V> r = null;
817 >            for (Node<K,V> e = first; e != null; e = e.next) {
818 >                long s;
819 >                if ((s = tryReadLock()) != 0L) {
820                      try {
821 <                        r = getTreeNode(h, k, root, comparableClassFor(k));
821 >                        r = getTreeNode(h, k, root, cc);
822                      } finally {
823 <                        releaseShared(0);
823 >                        unlockRead(s);
824                      }
825                      break;
826                  }
# Line 815 | Line 828 | public class ConcurrentHashMap<K,V>
828                      r = e;
829                      break;
830                  }
818                else
819                    c = getState();
831              }
832              return r == null ? null : r.val;
833          }
# Line 825 | Line 836 | public class ConcurrentHashMap<K,V>
836           * Finds or adds a node.
837           * @return null if added
838           */
839 <        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
829 <            (int h, Object k, V v) {
839 >        final TreeNode<K,V> putTreeNode(int h, Object k, V v) {
840              Class<?> cc = comparableClassFor(k);
841 <            TreeNode<V> pp = root, p = null;
841 >            TreeNode<K,V> pp = root, p = null;
842              int dir = 0;
843              while (pp != null) { // find existing node or leaf to insert at
844                  int ph; Object pk;
# Line 839 | Line 849 | public class ConcurrentHashMap<K,V>
849                      return p;
850                  else if (cc == null || comparableClassFor(pk) != cc ||
851                           (dir = ((Comparable<Object>)k).compareTo(pk)) == 0) {
852 <                    TreeNode<V> r, pr;
852 >                    TreeNode<K,V> r, pr;
853                      if ((pr = p.right) != null && h >= pr.hash &&
854                          (r = getTreeNode(h, k, pr, cc)) != null)
855                          return r;
# Line 849 | Line 859 | public class ConcurrentHashMap<K,V>
859                  pp = (dir > 0) ? p.right : p.left;
860              }
861  
862 <            TreeNode<V> f = first;
863 <            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
862 >            TreeNode<K,V> f = first;
863 >            TreeNode<K,V> x = first = new TreeNode<K,V>(h, k, v, f, p);
864              if (p == null)
865                  root = x;
866              else { // attach and rebalance; adapted from CLR
867 <                TreeNode<V> xp, xpp;
867 >                TreeNode<K,V> xp, xpp;
868                  if (f != null)
869                      f.prev = x;
870                  if (dir <= 0)
# Line 864 | Line 874 | public class ConcurrentHashMap<K,V>
874                  x.red = true;
875                  while (x != null && (xp = x.parent) != null && xp.red &&
876                         (xpp = xp.parent) != null) {
877 <                    TreeNode<V> xppl = xpp.left;
877 >                    TreeNode<K,V> xppl = xpp.left;
878                      if (xp == xppl) {
879 <                        TreeNode<V> y = xpp.right;
879 >                        TreeNode<K,V> y = xpp.right;
880                          if (y != null && y.red) {
881                              y.red = false;
882                              xp.red = false;
# Line 888 | Line 898 | public class ConcurrentHashMap<K,V>
898                          }
899                      }
900                      else {
901 <                        TreeNode<V> y = xppl;
901 >                        TreeNode<K,V> y = xppl;
902                          if (y != null && y.red) {
903                              y.red = false;
904                              xp.red = false;
# Line 910 | Line 920 | public class ConcurrentHashMap<K,V>
920                          }
921                      }
922                  }
923 <                TreeNode<V> r = root;
923 >                TreeNode<K,V> r = root;
924                  if (r != null && r.red)
925                      r.red = false;
926              }
# Line 925 | Line 935 | public class ConcurrentHashMap<K,V>
935           * that are accessible independently of lock. So instead we
936           * swap the tree linkages.
937           */
938 <        final void deleteTreeNode(TreeNode<V> p) {
939 <            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
940 <            TreeNode<V> pred = p.prev;
938 >        final void deleteTreeNode(TreeNode<K,V> p) {
939 >            TreeNode<K,V> next = (TreeNode<K,V>)p.next;
940 >            TreeNode<K,V> pred = p.prev;  // unlink traversal pointers
941              if (pred == null)
942                  first = next;
943              else
944                  pred.next = next;
945              if (next != null)
946                  next.prev = pred;
947 <            TreeNode<V> replacement;
948 <            TreeNode<V> pl = p.left;
949 <            TreeNode<V> pr = p.right;
947 >            TreeNode<K,V> replacement;
948 >            TreeNode<K,V> pl = p.left;
949 >            TreeNode<K,V> pr = p.right;
950              if (pl != null && pr != null) {
951 <                TreeNode<V> s = pr, sl;
951 >                TreeNode<K,V> s = pr, sl;
952                  while ((sl = s.left) != null) // find successor
953                      s = sl;
954                  boolean c = s.red; s.red = p.red; p.red = c; // swap colors
955 <                TreeNode<V> sr = s.right;
956 <                TreeNode<V> pp = p.parent;
955 >                TreeNode<K,V> sr = s.right;
956 >                TreeNode<K,V> pp = p.parent;
957                  if (s == pr) { // p was s's direct parent
958                      p.parent = s;
959                      s.right = p;
960                  }
961                  else {
962 <                    TreeNode<V> sp = s.parent;
962 >                    TreeNode<K,V> sp = s.parent;
963                      if ((p.parent = sp) != null) {
964                          if (s == sp.left)
965                              sp.left = p;
# Line 974 | Line 984 | public class ConcurrentHashMap<K,V>
984              }
985              else
986                  replacement = (pl != null) ? pl : pr;
987 <            TreeNode<V> pp = p.parent;
987 >            TreeNode<K,V> pp = p.parent;
988              if (replacement == null) {
989                  if (pp == null) {
990                      root = null;
# Line 993 | Line 1003 | public class ConcurrentHashMap<K,V>
1003                  p.left = p.right = p.parent = null;
1004              }
1005              if (!p.red) { // rebalance, from CLR
1006 <                TreeNode<V> x = replacement;
1006 >                TreeNode<K,V> x = replacement;
1007                  while (x != null) {
1008 <                    TreeNode<V> xp, xpl;
1008 >                    TreeNode<K,V> xp, xpl;
1009                      if (x.red || (xp = x.parent) == null) {
1010                          x.red = false;
1011                          break;
1012                      }
1013                      if (x == (xpl = xp.left)) {
1014 <                        TreeNode<V> sib = xp.right;
1014 >                        TreeNode<K,V> sib = xp.right;
1015                          if (sib != null && sib.red) {
1016                              sib.red = false;
1017                              xp.red = true;
# Line 1011 | Line 1021 | public class ConcurrentHashMap<K,V>
1021                          if (sib == null)
1022                              x = xp;
1023                          else {
1024 <                            TreeNode<V> sl = sib.left, sr = sib.right;
1024 >                            TreeNode<K,V> sl = sib.left, sr = sib.right;
1025                              if ((sr == null || !sr.red) &&
1026                                  (sl == null || !sl.red)) {
1027                                  sib.red = true;
# Line 1040 | Line 1050 | public class ConcurrentHashMap<K,V>
1050                          }
1051                      }
1052                      else { // symmetric
1053 <                        TreeNode<V> sib = xpl;
1053 >                        TreeNode<K,V> sib = xpl;
1054                          if (sib != null && sib.red) {
1055                              sib.red = false;
1056                              xp.red = true;
# Line 1050 | Line 1060 | public class ConcurrentHashMap<K,V>
1060                          if (sib == null)
1061                              x = xp;
1062                          else {
1063 <                            TreeNode<V> sl = sib.left, sr = sib.right;
1063 >                            TreeNode<K,V> sl = sib.left, sr = sib.right;
1064                              if ((sl == null || !sl.red) &&
1065                                  (sr == null || !sr.red)) {
1066                                  sib.red = true;
# Line 1114 | Line 1124 | public class ConcurrentHashMap<K,V>
1124       * Replaces a list bin with a tree bin if key is comparable.  Call
1125       * only when locked.
1126       */
1127 <    private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1128 <        if (comparableClassFor(key) != null) {
1129 <            TreeBin<V> t = new TreeBin<V>();
1130 <            for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1127 >    private final void replaceWithTreeBin(Node<K,V>[] tab, int index, Object key) {
1128 >        if (tab != null && comparableClassFor(key) != null) {
1129 >            TreeBin<K,V> t = new TreeBin<K,V>();
1130 >            for (Node<K,V> e = tabAt(tab, index); e != null; e = e.next)
1131                  t.putTreeNode(e.hash, e.key, e.val);
1132 <            setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1132 >            setTabAt(tab, index, new Node<K,V>(MOVED, t, null, null));
1133          }
1134      }
1135  
1136      /* ---------------- Internal access and update methods -------------- */
1137  
1138      /** Implementation for get and containsKey */
1139 <    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
1139 >    private final V internalGet(Object k) {
1140          int h = spread(k.hashCode());
1141 <        retry: for (Node<V>[] tab = table; tab != null;) {
1142 <            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
1143 <            for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1141 >        V v = null;
1142 >        Node<K,V>[] tab; Node<K,V> e;
1143 >        if ((tab = table) != null &&
1144 >            (e = tabAt(tab, (tab.length - 1) & h)) != null) {
1145 >            for (;;) {
1146 >                int eh; Object ek;
1147                  if ((eh = e.hash) < 0) {
1148 <                    if ((ek = e.key) instanceof TreeBin)  // search TreeBin
1149 <                        return ((TreeBin<V>)ek).getValue(h, k);
1150 <                    else {                      // restart with new table
1138 <                        tab = (Node<V>[])ek;
1139 <                        continue retry;
1148 >                    if ((ek = e.key) instanceof TreeBin) { // search TreeBin
1149 >                        v = ((TreeBin<K,V>)ek).getValue(h, k);
1150 >                        break;
1151                      }
1152 +                    else if (!(ek instanceof Node[]) ||    // try new table
1153 +                             (e = tabAt(tab = (Node<K,V>[])ek,
1154 +                                        (tab.length - 1) & h)) == null)
1155 +                        break;
1156                  }
1157 <                else if (eh == h && (ev = e.val) != null &&
1158 <                         ((ek = e.key) == k || k.equals(ek)))
1159 <                    return ev;
1157 >                else if (eh == h && ((ek = e.key) == k || k.equals(ek))) {
1158 >                    v = e.val;
1159 >                    break;
1160 >                }
1161 >                else if ((e = e.next) == null)
1162 >                    break;
1163              }
1146            break;
1164          }
1165 <        return null;
1165 >        return v;
1166      }
1167  
1168      /**
# Line 1153 | Line 1170 | public class ConcurrentHashMap<K,V>
1170       * Replaces node value with v, conditional upon match of cv if
1171       * non-null.  If resulting value is null, delete.
1172       */
1173 <    @SuppressWarnings("unchecked") private final V internalReplace
1157 <        (Object k, V v, Object cv) {
1173 >    private final V internalReplace(Object k, V v, Object cv) {
1174          int h = spread(k.hashCode());
1175          V oldVal = null;
1176 <        for (Node<V>[] tab = table;;) {
1177 <            Node<V> f; int i, fh; Object fk;
1176 >        for (Node<K,V>[] tab = table;;) {
1177 >            Node<K,V> f; int i, fh; Object fk;
1178              if (tab == null ||
1179                  (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1180                  break;
1181              else if ((fh = f.hash) < 0) {
1182                  if ((fk = f.key) instanceof TreeBin) {
1183 <                    TreeBin<V> t = (TreeBin<V>)fk;
1183 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1184 >                    long stamp = t.writeLock();
1185                      boolean validated = false;
1186                      boolean deleted = false;
1170                    t.acquire(0);
1187                      try {
1188                          if (tabAt(tab, i) == f) {
1189                              validated = true;
1190 <                            TreeNode<V> p = t.getTreeNode(h, k);
1190 >                            Class<?> cc = comparableClassFor(k);
1191 >                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
1192                              if (p != null) {
1193                                  V pv = p.val;
1194                                  if (cv == null || cv == pv || cv.equals(pv)) {
1195                                      oldVal = pv;
1196 <                                    if ((p.val = v) == null) {
1196 >                                    if (v != null)
1197 >                                        p.val = v;
1198 >                                    else {
1199                                          deleted = true;
1200                                          t.deleteTreeNode(p);
1201                                      }
# Line 1184 | Line 1203 | public class ConcurrentHashMap<K,V>
1203                              }
1204                          }
1205                      } finally {
1206 <                        t.release(0);
1206 >                        t.unlockWrite(stamp);
1207                      }
1208                      if (validated) {
1209                          if (deleted)
# Line 1193 | Line 1212 | public class ConcurrentHashMap<K,V>
1212                      }
1213                  }
1214                  else
1215 <                    tab = (Node<V>[])fk;
1215 >                    tab = (Node<K,V>[])fk;
1216              }
1198            else if (fh != h && f.next == null) // precheck
1199                break;                          // rules out possible existence
1217              else {
1218                  boolean validated = false;
1219                  boolean deleted = false;
1220                  synchronized (f) {
1221                      if (tabAt(tab, i) == f) {
1222                          validated = true;
1223 <                        for (Node<V> e = f, pred = null;;) {
1224 <                            Object ek; V ev;
1223 >                        for (Node<K,V> e = f, pred = null;;) {
1224 >                            Object ek;
1225                              if (e.hash == h &&
1209                                ((ev = e.val) != null) &&
1226                                  ((ek = e.key) == k || k.equals(ek))) {
1227 +                                V ev = e.val;
1228                                  if (cv == null || cv == ev || cv.equals(ev)) {
1229                                      oldVal = ev;
1230 <                                    if ((e.val = v) == null) {
1230 >                                    if (v != null)
1231 >                                        e.val = v;
1232 >                                    else {
1233                                          deleted = true;
1234 <                                        Node<V> en = e.next;
1234 >                                        Node<K,V> en = e.next;
1235                                          if (pred != null)
1236                                              pred.next = en;
1237                                          else
# Line 1256 | Line 1275 | public class ConcurrentHashMap<K,V>
1275       */
1276  
1277      /** Implementation for put and putIfAbsent */
1278 <    @SuppressWarnings("unchecked") private final V internalPut
1260 <        (K k, V v, boolean onlyIfAbsent) {
1278 >    private final V internalPut(K k, V v, boolean onlyIfAbsent) {
1279          if (k == null || v == null) throw new NullPointerException();
1280          int h = spread(k.hashCode());
1281          int len = 0;
1282 <        for (Node<V>[] tab = table;;) {
1283 <            int i, fh; Node<V> f; Object fk; V fv;
1282 >        for (Node<K,V>[] tab = table;;) {
1283 >            int i, fh; Node<K,V> f; Object fk;
1284              if (tab == null)
1285                  tab = initTable();
1286              else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1287 <                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
1287 >                if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null)))
1288                      break;                   // no lock when adding to empty bin
1289              }
1290              else if ((fh = f.hash) < 0) {
1291                  if ((fk = f.key) instanceof TreeBin) {
1292 <                    TreeBin<V> t = (TreeBin<V>)fk;
1292 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1293 >                    long stamp = t.writeLock();
1294                      V oldVal = null;
1276                    t.acquire(0);
1295                      try {
1296                          if (tabAt(tab, i) == f) {
1297                              len = 2;
1298 <                            TreeNode<V> p = t.putTreeNode(h, k, v);
1298 >                            TreeNode<K,V> p = t.putTreeNode(h, k, v);
1299                              if (p != null) {
1300                                  oldVal = p.val;
1301                                  if (!onlyIfAbsent)
# Line 1285 | Line 1303 | public class ConcurrentHashMap<K,V>
1303                              }
1304                          }
1305                      } finally {
1306 <                        t.release(0);
1306 >                        t.unlockWrite(stamp);
1307                      }
1308                      if (len != 0) {
1309                          if (oldVal != null)
# Line 1294 | Line 1312 | public class ConcurrentHashMap<K,V>
1312                      }
1313                  }
1314                  else
1315 <                    tab = (Node<V>[])fk;
1315 >                    tab = (Node<K,V>[])fk;
1316              }
1299            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
1300                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
1301                return fv;
1317              else {
1318                  V oldVal = null;
1319                  synchronized (f) {
1320                      if (tabAt(tab, i) == f) {
1321                          len = 1;
1322 <                        for (Node<V> e = f;; ++len) {
1323 <                            Object ek; V ev;
1322 >                        for (Node<K,V> e = f;; ++len) {
1323 >                            Object ek;
1324                              if (e.hash == h &&
1310                                (ev = e.val) != null &&
1325                                  ((ek = e.key) == k || k.equals(ek))) {
1326 <                                oldVal = ev;
1326 >                                oldVal = e.val;
1327                                  if (!onlyIfAbsent)
1328                                      e.val = v;
1329                                  break;
1330                              }
1331 <                            Node<V> last = e;
1331 >                            Node<K,V> last = e;
1332                              if ((e = e.next) == null) {
1333 <                                last.next = new Node<V>(h, k, v, null);
1334 <                                if (len >= TREE_THRESHOLD)
1333 >                                last.next = new Node<K,V>(h, k, v, null);
1334 >                                if (len > TREE_THRESHOLD)
1335                                      replaceWithTreeBin(tab, i, k);
1336                                  break;
1337                              }
# Line 1336 | Line 1350 | public class ConcurrentHashMap<K,V>
1350      }
1351  
1352      /** Implementation for computeIfAbsent */
1353 <    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
1340 <        (K k, Function<? super K, ? extends V> mf) {
1353 >    private final V internalComputeIfAbsent(K k, Function<? super K, ? extends V> mf) {
1354          if (k == null || mf == null)
1355              throw new NullPointerException();
1356          int h = spread(k.hashCode());
1357          V val = null;
1358          int len = 0;
1359 <        for (Node<V>[] tab = table;;) {
1360 <            Node<V> f; int i; Object fk;
1359 >        for (Node<K,V>[] tab = table;;) {
1360 >            Node<K,V> f; int i; Object fk;
1361              if (tab == null)
1362                  tab = initTable();
1363              else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1364 <                Node<V> node = new Node<V>(h, k, null, null);
1364 >                Node<K,V> node = new Node<K,V>(h, k, null, null);
1365                  synchronized (node) {
1366                      if (casTabAt(tab, i, null, node)) {
1367                          len = 1;
# Line 1366 | Line 1379 | public class ConcurrentHashMap<K,V>
1379              }
1380              else if (f.hash < 0) {
1381                  if ((fk = f.key) instanceof TreeBin) {
1382 <                    TreeBin<V> t = (TreeBin<V>)fk;
1382 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1383 >                    long stamp = t.writeLock();
1384                      boolean added = false;
1371                    t.acquire(0);
1385                      try {
1386                          if (tabAt(tab, i) == f) {
1387 <                            len = 1;
1388 <                            TreeNode<V> p = t.getTreeNode(h, k);
1387 >                            len = 2;
1388 >                            Class<?> cc = comparableClassFor(k);
1389 >                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
1390                              if (p != null)
1391                                  val = p.val;
1392                              else if ((val = mf.apply(k)) != null) {
1393                                  added = true;
1380                                len = 2;
1394                                  t.putTreeNode(h, k, val);
1395                              }
1396                          }
1397                      } finally {
1398 <                        t.release(0);
1398 >                        t.unlockWrite(stamp);
1399                      }
1400                      if (len != 0) {
1401                          if (!added)
# Line 1391 | Line 1404 | public class ConcurrentHashMap<K,V>
1404                      }
1405                  }
1406                  else
1407 <                    tab = (Node<V>[])fk;
1407 >                    tab = (Node<K,V>[])fk;
1408              }
1409              else {
1397                for (Node<V> e = f; e != null; e = e.next) { // prescan
1398                    Object ek; V ev;
1399                    if (e.hash == h && (ev = e.val) != null &&
1400                        ((ek = e.key) == k || k.equals(ek)))
1401                        return ev;
1402                }
1410                  boolean added = false;
1411                  synchronized (f) {
1412                      if (tabAt(tab, i) == f) {
1413                          len = 1;
1414 <                        for (Node<V> e = f;; ++len) {
1414 >                        for (Node<K,V> e = f;; ++len) {
1415                              Object ek; V ev;
1416                              if (e.hash == h &&
1410                                (ev = e.val) != null &&
1417                                  ((ek = e.key) == k || k.equals(ek))) {
1418 <                                val = ev;
1418 >                                val = e.val;
1419                                  break;
1420                              }
1421 <                            Node<V> last = e;
1421 >                            Node<K,V> last = e;
1422                              if ((e = e.next) == null) {
1423                                  if ((val = mf.apply(k)) != null) {
1424                                      added = true;
1425 <                                    last.next = new Node<V>(h, k, val, null);
1426 <                                    if (len >= TREE_THRESHOLD)
1425 >                                    last.next = new Node<K,V>(h, k, val, null);
1426 >                                    if (len > TREE_THRESHOLD)
1427                                          replaceWithTreeBin(tab, i, k);
1428                                  }
1429                                  break;
# Line 1438 | Line 1444 | public class ConcurrentHashMap<K,V>
1444      }
1445  
1446      /** Implementation for compute */
1447 <    @SuppressWarnings("unchecked") private final V internalCompute
1448 <        (K k, boolean onlyIfPresent,
1443 <         BiFunction<? super K, ? super V, ? extends V> mf) {
1447 >    private final V internalCompute(K k, boolean onlyIfPresent,
1448 >                                    BiFunction<? super K, ? super V, ? extends V> mf) {
1449          if (k == null || mf == null)
1450              throw new NullPointerException();
1451          int h = spread(k.hashCode());
1452          V val = null;
1453          int delta = 0;
1454          int len = 0;
1455 <        for (Node<V>[] tab = table;;) {
1456 <            Node<V> f; int i, fh; Object fk;
1455 >        for (Node<K,V>[] tab = table;;) {
1456 >            Node<K,V> f; int i, fh; Object fk;
1457              if (tab == null)
1458                  tab = initTable();
1459              else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1460                  if (onlyIfPresent)
1461                      break;
1462 <                Node<V> node = new Node<V>(h, k, null, null);
1462 >                Node<K,V> node = new Node<K,V>(h, k, null, null);
1463                  synchronized (node) {
1464                      if (casTabAt(tab, i, null, node)) {
1465                          try {
# Line 1474 | Line 1479 | public class ConcurrentHashMap<K,V>
1479              }
1480              else if ((fh = f.hash) < 0) {
1481                  if ((fk = f.key) instanceof TreeBin) {
1482 <                    TreeBin<V> t = (TreeBin<V>)fk;
1483 <                    t.acquire(0);
1482 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1483 >                    long stamp = t.writeLock();
1484                      try {
1485                          if (tabAt(tab, i) == f) {
1486 <                            len = 1;
1487 <                            TreeNode<V> p = t.getTreeNode(h, k);
1488 <                            if (p == null && onlyIfPresent)
1489 <                                break;
1490 <                            V pv = (p == null) ? null : p.val;
1491 <                            if ((val = mf.apply(k, pv)) != null) {
1492 <                                if (p != null)
1493 <                                    p.val = val;
1494 <                                else {
1495 <                                    len = 2;
1496 <                                    delta = 1;
1497 <                                    t.putTreeNode(h, k, val);
1486 >                            len = 2;
1487 >                            Class<?> cc = comparableClassFor(k);
1488 >                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
1489 >                            if (p != null || !onlyIfPresent) {
1490 >                                V pv = (p == null) ? null : p.val;
1491 >                                if ((val = mf.apply(k, pv)) != null) {
1492 >                                    if (p != null)
1493 >                                        p.val = val;
1494 >                                    else {
1495 >                                        delta = 1;
1496 >                                        t.putTreeNode(h, k, val);
1497 >                                    }
1498 >                                }
1499 >                                else if (p != null) {
1500 >                                    delta = -1;
1501 >                                    t.deleteTreeNode(p);
1502                                  }
1494                            }
1495                            else if (p != null) {
1496                                delta = -1;
1497                                t.deleteTreeNode(p);
1503                              }
1504                          }
1505                      } finally {
1506 <                        t.release(0);
1506 >                        t.unlockWrite(stamp);
1507                      }
1508                      if (len != 0)
1509                          break;
1510                  }
1511                  else
1512 <                    tab = (Node<V>[])fk;
1512 >                    tab = (Node<K,V>[])fk;
1513              }
1514              else {
1515                  synchronized (f) {
1516                      if (tabAt(tab, i) == f) {
1517                          len = 1;
1518 <                        for (Node<V> e = f, pred = null;; ++len) {
1519 <                            Object ek; V ev;
1518 >                        for (Node<K,V> e = f, pred = null;; ++len) {
1519 >                            Object ek;
1520                              if (e.hash == h &&
1516                                (ev = e.val) != null &&
1521                                  ((ek = e.key) == k || k.equals(ek))) {
1522 <                                val = mf.apply(k, ev);
1522 >                                val = mf.apply(k, e.val);
1523                                  if (val != null)
1524                                      e.val = val;
1525                                  else {
1526                                      delta = -1;
1527 <                                    Node<V> en = e.next;
1527 >                                    Node<K,V> en = e.next;
1528                                      if (pred != null)
1529                                          pred.next = en;
1530                                      else
# Line 1532 | Line 1536 | public class ConcurrentHashMap<K,V>
1536                              if ((e = e.next) == null) {
1537                                  if (!onlyIfPresent &&
1538                                      (val = mf.apply(k, null)) != null) {
1539 <                                    pred.next = new Node<V>(h, k, val, null);
1539 >                                    pred.next = new Node<K,V>(h, k, val, null);
1540                                      delta = 1;
1541 <                                    if (len >= TREE_THRESHOLD)
1541 >                                    if (len > TREE_THRESHOLD)
1542                                          replaceWithTreeBin(tab, i, k);
1543                                  }
1544                                  break;
# Line 1552 | Line 1556 | public class ConcurrentHashMap<K,V>
1556      }
1557  
1558      /** Implementation for merge */
1559 <    @SuppressWarnings("unchecked") private final V internalMerge
1560 <        (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
1559 >    private final V internalMerge(K k, V v,
1560 >                                  BiFunction<? super V, ? super V, ? extends V> mf) {
1561          if (k == null || v == null || mf == null)
1562              throw new NullPointerException();
1563          int h = spread(k.hashCode());
1564          V val = null;
1565          int delta = 0;
1566          int len = 0;
1567 <        for (Node<V>[] tab = table;;) {
1568 <            int i; Node<V> f; Object fk; V fv;
1567 >        for (Node<K,V>[] tab = table;;) {
1568 >            int i; Node<K,V> f; Object fk;
1569              if (tab == null)
1570                  tab = initTable();
1571              else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1572 <                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1572 >                if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null))) {
1573                      delta = 1;
1574                      val = v;
1575                      break;
# Line 1573 | Line 1577 | public class ConcurrentHashMap<K,V>
1577              }
1578              else if (f.hash < 0) {
1579                  if ((fk = f.key) instanceof TreeBin) {
1580 <                    TreeBin<V> t = (TreeBin<V>)fk;
1581 <                    t.acquire(0);
1580 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1581 >                    long stamp = t.writeLock();
1582                      try {
1583                          if (tabAt(tab, i) == f) {
1584 <                            len = 1;
1585 <                            TreeNode<V> p = t.getTreeNode(h, k);
1584 >                            len = 2;
1585 >                            Class<?> cc = comparableClassFor(k);
1586 >                            TreeNode<K,V> p = t.getTreeNode(h, k, t.root, cc);
1587                              val = (p == null) ? v : mf.apply(p.val, v);
1588                              if (val != null) {
1589                                  if (p != null)
1590                                      p.val = val;
1591                                  else {
1587                                    len = 2;
1592                                      delta = 1;
1593                                      t.putTreeNode(h, k, val);
1594                                  }
# Line 1595 | Line 1599 | public class ConcurrentHashMap<K,V>
1599                              }
1600                          }
1601                      } finally {
1602 <                        t.release(0);
1602 >                        t.unlockWrite(stamp);
1603                      }
1604                      if (len != 0)
1605                          break;
1606                  }
1607                  else
1608 <                    tab = (Node<V>[])fk;
1608 >                    tab = (Node<K,V>[])fk;
1609              }
1610              else {
1611                  synchronized (f) {
1612                      if (tabAt(tab, i) == f) {
1613                          len = 1;
1614 <                        for (Node<V> e = f, pred = null;; ++len) {
1615 <                            Object ek; V ev;
1614 >                        for (Node<K,V> e = f, pred = null;; ++len) {
1615 >                            Object ek;
1616                              if (e.hash == h &&
1613                                (ev = e.val) != null &&
1617                                  ((ek = e.key) == k || k.equals(ek))) {
1618 <                                val = mf.apply(ev, v);
1618 >                                val = mf.apply(e.val, v);
1619                                  if (val != null)
1620                                      e.val = val;
1621                                  else {
1622                                      delta = -1;
1623 <                                    Node<V> en = e.next;
1623 >                                    Node<K,V> en = e.next;
1624                                      if (pred != null)
1625                                          pred.next = en;
1626                                      else
# Line 1627 | Line 1630 | public class ConcurrentHashMap<K,V>
1630                              }
1631                              pred = e;
1632                              if ((e = e.next) == null) {
1630                                val = v;
1631                                pred.next = new Node<V>(h, k, val, null);
1633                                  delta = 1;
1634 <                                if (len >= TREE_THRESHOLD)
1634 >                                val = v;
1635 >                                pred.next = new Node<K,V>(h, k, val, null);
1636 >                                if (len > TREE_THRESHOLD)
1637                                      replaceWithTreeBin(tab, i, k);
1638                                  break;
1639                              }
# Line 1647 | Line 1650 | public class ConcurrentHashMap<K,V>
1650      }
1651  
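internalMerge (like internalCompute above) now hands comparableClassFor(k) to getTreeNode, so a TreeBin can use comparison order among Comparable keys to break hash-code ties. The helper itself is outside this hunk; the following is only a hedged sketch of the kind of reflective check such a helper could perform (the method name and the String fast path are illustrative assumptions, not the class's code):

    import java.lang.reflect.ParameterizedType;
    import java.lang.reflect.Type;

    final class ComparableClassSketch {
        // Illustrative sketch: returns x's class if x implements Comparable<C>
        // for its own class C, else null.
        static Class<?> comparableClassOf(Object x) {
            if (x instanceof Comparable) {
                Class<?> c = x.getClass();
                if (c == String.class)                       // very common case
                    return c;
                for (Type t : c.getGenericInterfaces()) {
                    if (t instanceof ParameterizedType) {
                        ParameterizedType p = (ParameterizedType) t;
                        Type[] args = p.getActualTypeArguments();
                        if (p.getRawType() == Comparable.class &&
                            args.length == 1 && args[0] == c)  // Comparable<C> with C == c
                            return c;
                    }
                }
            }
            return null;
        }

        public static void main(String[] args) {
            System.out.println(comparableClassOf("abc"));         // class java.lang.String
            System.out.println(comparableClassOf(new Object()));  // null
        }
    }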
1652      /** Implementation for putAll */
1653 <    @SuppressWarnings("unchecked") private final void internalPutAll
1651 <        (Map<? extends K, ? extends V> m) {
1653 >    private final void internalPutAll(Map<? extends K, ? extends V> m) {
1654          tryPresize(m.size());
1655          long delta = 0L;     // number of uncommitted additions
1656          boolean npe = false; // to throw exception on exit for nulls
# Line 1661 | Line 1663 | public class ConcurrentHashMap<K,V>
1663                      break;
1664                  }
1665                  int h = spread(k.hashCode());
1666 <                for (Node<V>[] tab = table;;) {
1667 <                    int i; Node<V> f; int fh; Object fk;
1666 >                for (Node<K,V>[] tab = table;;) {
1667 >                    int i; Node<K,V> f; int fh; Object fk;
1668                      if (tab == null)
1669                          tab = initTable();
1670                      else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1671 <                        if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1671 >                        if (casTabAt(tab, i, null, new Node<K,V>(h, k, v, null))) {
1672                              ++delta;
1673                              break;
1674                          }
1675                      }
1676                      else if ((fh = f.hash) < 0) {
1677                          if ((fk = f.key) instanceof TreeBin) {
1678 <                            TreeBin<V> t = (TreeBin<V>)fk;
1678 >                            TreeBin<K,V> t = (TreeBin<K,V>)fk;
1679 >                            long stamp = t.writeLock();
1680                              boolean validated = false;
1678                            t.acquire(0);
1681                              try {
1682                                  if (tabAt(tab, i) == f) {
1683                                      validated = true;
1684 <                                    TreeNode<V> p = t.getTreeNode(h, k);
1684 >                                    Class<?> cc = comparableClassFor(k);
1685 >                                    TreeNode<K,V> p = t.getTreeNode(h, k,
1686 >                                                                    t.root, cc);
1687                                      if (p != null)
1688                                          p.val = v;
1689                                      else {
1686                                        t.putTreeNode(h, k, v);
1690                                          ++delta;
1691 +                                        t.putTreeNode(h, k, v);
1692                                      }
1693                                  }
1694                              } finally {
1695 <                                t.release(0);
1695 >                                t.unlockWrite(stamp);
1696                              }
1697                              if (validated)
1698                                  break;
1699                          }
1700                          else
1701 <                            tab = (Node<V>[])fk;
1701 >                            tab = (Node<K,V>[])fk;
1702                      }
1703                      else {
1704                          int len = 0;
1705                          synchronized (f) {
1706                              if (tabAt(tab, i) == f) {
1707                                  len = 1;
1708 <                                for (Node<V> e = f;; ++len) {
1709 <                                    Object ek; V ev;
1708 >                                for (Node<K,V> e = f;; ++len) {
1709 >                                    Object ek;
1710                                      if (e.hash == h &&
1707                                        (ev = e.val) != null &&
1711                                          ((ek = e.key) == k || k.equals(ek))) {
1712                                          e.val = v;
1713                                          break;
1714                                      }
1715 <                                    Node<V> last = e;
1715 >                                    Node<K,V> last = e;
1716                                      if ((e = e.next) == null) {
1717                                          ++delta;
1718 <                                        last.next = new Node<V>(h, k, v, null);
1719 <                                        if (len >= TREE_THRESHOLD)
1718 >                                        last.next = new Node<K,V>(h, k, v, null);
1719 >                                        if (len > TREE_THRESHOLD)
1720                                              replaceWithTreeBin(tab, i, k);
1721                                          break;
1722                                      }
# Line 1742 | Line 1745 | public class ConcurrentHashMap<K,V>
1745       * Implementation for clear. Steps through each bin, removing all
1746       * nodes.
1747       */
1748 <    @SuppressWarnings("unchecked") private final void internalClear() {
1748 >    private final void internalClear() {
1749          long delta = 0L; // negative number of deletions
1750          int i = 0;
1751 <        Node<V>[] tab = table;
1751 >        Node<K,V>[] tab = table;
1752          while (tab != null && i < tab.length) {
1753 <            Node<V> f = tabAt(tab, i);
1753 >            Node<K,V> f = tabAt(tab, i);
1754              if (f == null)
1755                  ++i;
1756              else if (f.hash < 0) {
1757                  Object fk;
1758                  if ((fk = f.key) instanceof TreeBin) {
1759 <                    TreeBin<V> t = (TreeBin<V>)fk;
1760 <                    t.acquire(0);
1759 >                    TreeBin<K,V> t = (TreeBin<K,V>)fk;
1760 >                    long stamp = t.writeLock();
1761                      try {
1762                          if (tabAt(tab, i) == f) {
1763 <                            for (Node<V> p = t.first; p != null; p = p.next) {
1764 <                                if (p.val != null) { // (currently always true)
1762 <                                    p.val = null;
1763 <                                    --delta;
1764 <                                }
1765 <                            }
1763 >                            for (Node<K,V> p = t.first; p != null; p = p.next)
1764 >                                --delta;
1765                              t.first = null;
1766                              t.root = null;
1767                              ++i;
1768                          }
1769                      } finally {
1770 <                        t.release(0);
1770 >                        t.unlockWrite(stamp);
1771                      }
1772                  }
1773                  else
1774 <                    tab = (Node<V>[])fk;
1774 >                    tab = (Node<K,V>[])fk;
1775              }
1776              else {
1777                  synchronized (f) {
1778                      if (tabAt(tab, i) == f) {
1779 <                        for (Node<V> e = f; e != null; e = e.next) {
1780 <                            if (e.val != null) {  // (currently always true)
1782 <                                e.val = null;
1783 <                                --delta;
1784 <                            }
1785 <                        }
1779 >                        for (Node<K,V> e = f; e != null; e = e.next)
1780 >                            --delta;
1781                          setTabAt(tab, i, null);
1782                          ++i;
1783                      }
# Line 1812 | Line 1807 | public class ConcurrentHashMap<K,V>
1807      /**
1808       * Initializes table, using the size recorded in sizeCtl.
1809       */
1810 <    @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1811 <        Node<V>[] tab; int sc;
1810 >    private final Node<K,V>[] initTable() {
1811 >        Node<K,V>[] tab; int sc;
1812          while ((tab = table) == null) {
1813              if ((sc = sizeCtl) < 0)
1814                  Thread.yield(); // lost initialization race; just spin
# Line 1821 | Line 1816 | public class ConcurrentHashMap<K,V>
1816                  try {
1817                      if ((tab = table) == null) {
1818                          int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1819 <                        @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1825 <                        table = tab = (Node<V>[])tb;
1819 >                        table = tab = (Node<K,V>[])new Node[n];
1820                          sc = n - (n >>> 2);
1821                      }
1822                  } finally {
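initTable's shape -- yield while another thread owns initialization, otherwise CAS a sentinel into the size-control word, build and publish the table, and restore the resize threshold in finally -- is a reusable pattern. A simplified sketch using AtomicInteger in place of the Unsafe-based CAS on sizeCtl (illustrative only):

    import java.util.concurrent.atomic.AtomicInteger;

    class LazyTableSketch {
        private volatile Object[] table;
        private final AtomicInteger sizeCtl = new AtomicInteger(0);  // stand-in for the real control word

        Object[] initTable() {
            Object[] tab;
            while ((tab = table) == null) {
                int sc = sizeCtl.get();
                if (sc < 0)
                    Thread.yield();                        // lost the race; let the winner finish
                else if (sizeCtl.compareAndSet(sc, -1)) {  // claim initialization
                    try {
                        if ((tab = table) == null) {       // re-check after winning the CAS
                            int n = (sc > 0) ? sc : 16;
                            table = tab = new Object[n];
                            sc = n - (n >>> 2);            // 0.75 * n as the next-resize threshold
                        }
                    } finally {
                        sizeCtl.set(sc);                   // publish threshold, or restore old value
                    }
                }
            }
            return tab;
        }
    }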
# Line 1862 | Line 1856 | public class ConcurrentHashMap<K,V>
1856              s = sumCount();
1857          }
1858          if (check >= 0) {
1859 <            Node<V>[] tab, nt; int sc;
1859 >            Node<K,V>[] tab, nt; int sc;
1860              while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1861                     tab.length < MAXIMUM_CAPACITY) {
1862                  if (sc < 0) {
# Line 1884 | Line 1878 | public class ConcurrentHashMap<K,V>
1878       *
1879       * @param size number of elements (doesn't need to be perfectly accurate)
1880       */
1881 <    @SuppressWarnings("unchecked") private final void tryPresize(int size) {
1881 >    private final void tryPresize(int size) {
1882          int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1883              tableSizeFor(size + (size >>> 1) + 1);
1884          int sc;
1885          while ((sc = sizeCtl) >= 0) {
1886 <            Node<V>[] tab = table; int n;
1886 >            Node<K,V>[] tab = table; int n;
1887              if (tab == null || (n = tab.length) == 0) {
1888                  n = (sc > c) ? sc : c;
1889                  if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1890                      try {
1891                          if (table == tab) {
1892 <                            @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1899 <                            table = (Node<V>[])tb;
1892 >                            table = (Node<K,V>[])new Node[n];
1893                              sc = n - (n >>> 2);
1894                          }
1895                      } finally {
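tryPresize asks tableSizeFor for a power-of-two capacity covering the requested size plus 50% slack. tableSizeFor itself is not part of this hunk; the usual bit-smearing way to round an int up to a power of two looks roughly like this (a sketch under that assumption, not the class's code):

    final class TableSizingSketch {
        // Rounds c up to the next power of two (13 -> 16, 16 -> 16), capped at 2^30.
        static int nextPowerOfTwo(int c) {
            int n = c - 1;
            n |= n >>> 1;  n |= n >>> 2;  n |= n >>> 4;
            n |= n >>> 8;  n |= n >>> 16;
            return (n < 0) ? 1 : (n >= (1 << 30)) ? (1 << 30) : n + 1;
        }
    }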
# Line 1916 | Line 1909 | public class ConcurrentHashMap<K,V>
1909       * Moves and/or copies the nodes in each bin to new table. See
1910       * above for explanation.
1911       */
1912 <    @SuppressWarnings("unchecked") private final void transfer
1920 <        (Node<V>[] tab, Node<V>[] nextTab) {
1912 >    private final void transfer(Node<K,V>[] tab, Node<K,V>[] nextTab) {
1913          int n = tab.length, stride;
1914          if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
1915              stride = MIN_TRANSFER_STRIDE; // subdivide range
1916          if (nextTab == null) {            // initiating
1917              try {
1918 <                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
1927 <                nextTab = (Node<V>[])tb;
1918 >                nextTab = (Node<K,V>[])new Node[n << 1];
1919              } catch (Throwable ex) {      // try to cope with OOME
1920                  sizeCtl = Integer.MAX_VALUE;
1921                  return;
# Line 1932 | Line 1923 | public class ConcurrentHashMap<K,V>
1923              nextTable = nextTab;
1924              transferOrigin = n;
1925              transferIndex = n;
1926 <            Node<V> rev = new Node<V>(MOVED, tab, null, null);
1926 >            Node<K,V> rev = new Node<K,V>(MOVED, tab, null, null);
1927              for (int k = n; k > 0;) {    // progressively reveal ready slots
1928                  int nextk = (k > stride) ? k - stride : 0;
1929                  for (int m = nextk; m < k; ++m)
# Line 1943 | Line 1934 | public class ConcurrentHashMap<K,V>
1934              }
1935          }
1936          int nextn = nextTab.length;
1937 <        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
1937 >        Node<K,V> fwd = new Node<K,V>(MOVED, nextTab, null, null);
1938          boolean advance = true;
1939          for (int i = 0, bound = 0;;) {
1940 <            int nextIndex, nextBound; Node<V> f; Object fk;
1940 >            int nextIndex, nextBound; Node<K,V> f; Object fk;
1941              while (advance) {
1942                  if (--i >= bound)
1943                      advance = false;
# Line 1986 | Line 1977 | public class ConcurrentHashMap<K,V>
1977                  synchronized (f) {
1978                      if (tabAt(tab, i) == f) {
1979                          int runBit = f.hash & n;
1980 <                        Node<V> lastRun = f, lo = null, hi = null;
1981 <                        for (Node<V> p = f.next; p != null; p = p.next) {
1980 >                        Node<K,V> lastRun = f, lo = null, hi = null;
1981 >                        for (Node<K,V> p = f.next; p != null; p = p.next) {
1982                              int b = p.hash & n;
1983                              if (b != runBit) {
1984                                  runBit = b;
# Line 1998 | Line 1989 | public class ConcurrentHashMap<K,V>
1989                              lo = lastRun;
1990                          else
1991                              hi = lastRun;
1992 <                        for (Node<V> p = f; p != lastRun; p = p.next) {
1993 <                            int ph = p.hash;
2003 <                            Object pk = p.key; V pv = p.val;
1992 >                        for (Node<K,V> p = f; p != lastRun; p = p.next) {
1993 >                            int ph = p.hash; Object pk = p.key; V pv = p.val;
1994                              if ((ph & n) == 0)
1995 <                                lo = new Node<V>(ph, pk, pv, lo);
1995 >                                lo = new Node<K,V>(ph, pk, pv, lo);
1996                              else
1997 <                                hi = new Node<V>(ph, pk, pv, hi);
1997 >                                hi = new Node<K,V>(ph, pk, pv, hi);
1998                          }
1999                          setTabAt(nextTab, i, lo);
2000                          setTabAt(nextTab, i + n, hi);
# Line 2014 | Line 2004 | public class ConcurrentHashMap<K,V>
2004                  }
2005              }
2006              else if ((fk = f.key) instanceof TreeBin) {
2007 <                TreeBin<V> t = (TreeBin<V>)fk;
2008 <                t.acquire(0);
2007 >                TreeBin<K,V> t = (TreeBin<K,V>)fk;
2008 >                long stamp = t.writeLock();
2009                  try {
2010                      if (tabAt(tab, i) == f) {
2011 <                        TreeBin<V> lt = new TreeBin<V>();
2012 <                        TreeBin<V> ht = new TreeBin<V>();
2013 <                        int lc = 0, hc = 0;
2014 <                        for (Node<V> e = t.first; e != null; e = e.next) {
2015 <                            int h = e.hash;
2016 <                            Object k = e.key; V v = e.val;
2017 <                            if ((h & n) == 0) {
2018 <                                ++lc;
2019 <                                lt.putTreeNode(h, k, v);
2011 >                        TreeNode<K,V> root;
2012 >                        Node<K,V> ln = null, hn = null;
2013 >                        if ((root = t.root) != null) {
2014 >                            Node<K,V> e, p; TreeNode<K,V> lr, rr; int lh;
2015 >                            TreeBin<K,V> lt = null, ht = null;
2016 >                            for (lr = root; lr.left != null; lr = lr.left);
2017 >                            for (rr = root; rr.right != null; rr = rr.right);
2018 >                            if ((lh = lr.hash) == rr.hash) { // move entire tree
2019 >                                if ((lh & n) == 0)
2020 >                                    lt = t;
2021 >                                else
2022 >                                    ht = t;
2023                              }
2024                              else {
2025 <                                ++hc;
2026 <                                ht.putTreeNode(h, k, v);
2025 >                                lt = new TreeBin<K,V>();
2026 >                                ht = new TreeBin<K,V>();
2027 >                                int lc = 0, hc = 0;
2028 >                                for (e = t.first; e != null; e = e.next) {
2029 >                                    int h = e.hash;
2030 >                                    Object k = e.key; V v = e.val;
2031 >                                    if ((h & n) == 0) {
2032 >                                        ++lc;
2033 >                                        lt.putTreeNode(h, k, v);
2034 >                                    }
2035 >                                    else {
2036 >                                        ++hc;
2037 >                                        ht.putTreeNode(h, k, v);
2038 >                                    }
2039 >                                }
2040 >                                if (lc < TREE_THRESHOLD) { // throw away
2041 >                                    for (p = lt.first; p != null; p = p.next)
2042 >                                        ln = new Node<K,V>(p.hash, p.key,
2043 >                                                           p.val, ln);
2044 >                                    lt = null;
2045 >                                }
2046 >                                if (hc < TREE_THRESHOLD) {
2047 >                                    for (p = ht.first; p != null; p = p.next)
2048 >                                        hn = new Node<K,V>(p.hash, p.key,
2049 >                                                           p.val, hn);
2050 >                                    ht = null;
2051 >                                }
2052                              }
2053 +                            if (ln == null && lt != null)
2054 +                                ln = new Node<K,V>(MOVED, lt, null, null);
2055 +                            if (hn == null && ht != null)
2056 +                                hn = new Node<K,V>(MOVED, ht, null, null);
2057                          }
2036                        Node<V> ln, hn; // throw away trees if too small
2037                        if (lc < TREE_THRESHOLD) {
2038                            ln = null;
2039                            for (Node<V> p = lt.first; p != null; p = p.next)
2040                                ln = new Node<V>(p.hash, p.key, p.val, ln);
2041                        }
2042                        else
2043                            ln = new Node<V>(MOVED, lt, null, null);
2058                          setTabAt(nextTab, i, ln);
2045                        if (hc < TREE_THRESHOLD) {
2046                            hn = null;
2047                            for (Node<V> p = ht.first; p != null; p = p.next)
2048                                hn = new Node<V>(p.hash, p.key, p.val, hn);
2049                        }
2050                        else
2051                            hn = new Node<V>(MOVED, ht, null, null);
2059                          setTabAt(nextTab, i + n, hn);
2060                          setTabAt(tab, i, fwd);
2061                          advance = true;
2062                      }
2063                  } finally {
2064 <                    t.release(0);
2064 >                    t.unlockWrite(stamp);
2065                  }
2066              }
2067              else
# Line 2162 | Line 2169 | public class ConcurrentHashMap<K,V>
2169  
2170      /**
2171       * Encapsulates traversal for methods such as containsValue; also
2172 <     * serves as a base class for other iterators and bulk tasks.
2166 <     *
2167 <     * At each step, the iterator snapshots the key ("nextKey") and
2168 <     * value ("nextVal") of a valid node (i.e., one that, at point of
2169 <     * snapshot, has a non-null user value). Because val fields can
2170 <     * change (including to null, indicating deletion), field nextVal
2171 <     * might not be accurate at point of use, but still maintains the
2172 <     * weak consistency property of holding a value that was once
2173 <     * valid. To support iterator.remove, the nextKey field is not
2174 <     * updated (nulled out) when the iterator cannot advance.
2175 <     *
2176 <     * Exported iterators must track whether the iterator has advanced
2177 <     * (in hasNext vs next) (by setting/checking/nulling field
2178 <     * nextVal), and then extract key, value, or key-value pairs as
2179 <     * return values of next().
2172 >     * serves as a base class for other iterators and spliterators.
2173       *
2174       * Method advance visits once each still-valid node that was
2175       * reachable upon iterator construction. It might miss some that
# Line 2195 | Line 2188 | public class ConcurrentHashMap<K,V>
2188       * across threads, iteration terminates if a bounds checks fails
2189       * for a table read.
2190       *
2198     * Methods advanceKey and advanceValue are specializations of the
2199     * common cases of advance, relaying to the full version
2200     * otherwise. The forEachKey and forEachValue methods further
2201     * specialize, bypassing all incremental field updates in most cases.
2202     *
2203     * This class supports both Spliterator-based traversal and
2204     * CountedCompleter-based bulk tasks. The same "batch" field is
2205     * used, but in slightly different ways, in the two cases.  For
2206     * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2207     * estimate of element coverage. For CHM tasks, it is a pre-scaled
2208     * size that halves down to zero for leaf tasks, that is only
2209     * computed upon execution of the task. (Tasks can be submitted to
2210     * any pool, of any size, so we don't know scale factors until
2211     * running.)
2212     *
2213     * This class extends CountedCompleter to streamline parallel
2214     * iteration in bulk operations. This adds only a few fields of
2215     * space overhead, which is small enough in cases where it is not
2216     * needed to not worry about it.  Because CountedCompleter is
2217     * Serializable, but iterators need not be, we need to add warning
2218     * suppressions.
2191       */
2192 <    @SuppressWarnings("serial") static class Traverser<K,V,R>
2193 <        extends CountedCompleter<R> {
2194 <        final ConcurrentHashMap<K,V> map;
2195 <        Node<V> next;        // the next entry to use
2196 <        K nextKey;           // cached key field of next
2197 <        V nextVal;           // cached val field of next
2198 <        Node<V>[] tab;       // current table; updated if resized
2199 <        int index;           // index of bin to use next
2200 <        int baseIndex;       // current index of initial table
2201 <        int baseLimit;       // index bound for initial table
2202 <        final int baseSize;  // initial table size
2203 <        int batch;           // split control
2204 <
2205 <        /** Creates iterator for all entries in the table. */
2234 <        Traverser(ConcurrentHashMap<K,V> map) {
2235 <            this.map = map;
2236 <            Node<V>[] t = this.tab = map.table;
2237 <            baseLimit = baseSize = (t == null) ? 0 : t.length;
2192 >    static class Traverser<K,V> {
2193 >        Node<K,V>[] tab;        // current table; updated if resized
2194 >        Node<K,V> next;         // the next entry to use
2195 >        int index;              // index of bin to use next
2196 >        int baseIndex;          // current index of initial table
2197 >        int baseLimit;          // index bound for initial table
2198 >        final int baseSize;     // initial table size
2199 >
2200 >        Traverser(Node<K,V>[] tab, int size, int index, int limit) {
2201 >            this.tab = tab;
2202 >            this.baseSize = size;
2203 >            this.baseIndex = this.index = index;
2204 >            this.baseLimit = limit;
2205 >            this.next = null;
2206          }
2207  
2208 <        /** Task constructor */
2209 <        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
2210 <            super(it);
2211 <            this.map = map;
2212 <            this.batch = batch; // -1 if unknown
2213 <            if (it == null) {
2214 <                Node<V>[] t = this.tab = map.table;
2215 <                baseLimit = baseSize = (t == null) ? 0 : t.length;
2216 <            }
2217 <            else { // split parent
2218 <                this.tab = it.tab;
2219 <                this.baseSize = it.baseSize;
2220 <                int hi = this.baseLimit = it.baseLimit;
2221 <                it.baseLimit = this.index = this.baseIndex =
2222 <                    (hi + it.baseIndex) >>> 1;
2208 >        /**
2209 >         * Advances if possible, returning next valid node, or null if none.
2210 >         */
2211 >        final Node<K,V> advance() {
2212 >            Node<K,V> e;
2213 >            if ((e = next) != null)
2214 >                e = e.next;
2215 >            for (;;) {
2216 >                Node<K,V>[] t; int i, n; Object ek;  // must use locals in checks
2217 >                if (e != null)
2218 >                    return next = e;
2219 >                if (baseIndex >= baseLimit || (t = tab) == null ||
2220 >                    (n = t.length) <= (i = index) || i < 0)
2221 >                    return next = null;
2222 >                if ((e = tabAt(t, index)) != null && e.hash < 0) {
2223 >                    if ((ek = e.key) instanceof TreeBin)
2224 >                        e = ((TreeBin<K,V>)ek).first;
2225 >                    else {
2226 >                        tab = (Node<K,V>[])ek;
2227 >                        e = null;
2228 >                        continue;
2229 >                    }
2230 >                }
2231 >                if ((index += baseSize) >= n)
2232 >                    index = ++baseIndex;    // visit upper slots if present
2233              }
2234          }
2235 +    }
2236  
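The traversal contract described above is what the public iterators expose as weak consistency: no ConcurrentModificationException, and every element present when the iterator was created is eventually visited, while later insertions may or may not be. A small demonstration against the public API (class and thread names are illustrative):

    import java.util.concurrent.ConcurrentHashMap;

    public class WeaklyConsistentDemo {
        public static void main(String[] args) throws InterruptedException {
            ConcurrentHashMap<Integer, String> map = new ConcurrentHashMap<>();
            for (int i = 0; i < 1000; i++) map.put(i, "v" + i);

            Thread writer = new Thread(() -> {
                for (int i = 1000; i < 100_000; i++) map.put(i, "v" + i);  // forces resizes
            });
            writer.start();

            int seen = 0;
            for (Integer k : map.keySet())   // no ConcurrentModificationException
                seen++;
            writer.join();
            // Keys present at iterator creation were all seen; later inserts may or may not be.
            System.out.println("saw " + seen + " keys, map now holds " + map.size());
        }
    }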
2237 <        /** Spliterator constructor */
2238 <        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
2239 <            super(it);
2237 >    /**
2238 >     * Base of key, value, and entry Iterators. Adds fields to
2239 >     * Traverser to support iterator.remove.
2240 >     */
2241 >    static class BaseIterator<K,V> extends Traverser<K,V> {
2242 >        final ConcurrentHashMap<K,V> map;
2243 >        Node<K,V> lastReturned;
2244 >        BaseIterator(Node<K,V>[] tab, int size, int index, int limit,
2245 >                    ConcurrentHashMap<K,V> map) {
2246 >            super(tab, size, index, limit);
2247              this.map = map;
2248 <            if (it == null) {
2263 <                Node<V>[] t = this.tab = map.table;
2264 <                baseLimit = baseSize = (t == null) ? 0 : t.length;
2265 <                long n = map.sumCount();
2266 <                batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2267 <                         (int)n);
2268 <            }
2269 <            else {
2270 <                this.tab = it.tab;
2271 <                this.baseSize = it.baseSize;
2272 <                int hi = this.baseLimit = it.baseLimit;
2273 <                it.baseLimit = this.index = this.baseIndex =
2274 <                    (hi + it.baseIndex) >>> 1;
2275 <                this.batch = it.batch >>>= 1;
2276 <            }
2248 >            advance();
2249          }
2250  
2251 <        /**
2252 <         * Advances if possible, returning next valid value, or null if none.
2253 <         */
2254 <        @SuppressWarnings("unchecked") final V advance() {
2255 <            for (Node<V> e = next;;) {
2256 <                if (e != null)                  // advance past used/skipped node
2257 <                    e = next = e.next;
2258 <                while (e == null) {             // get to next non-null bin
2259 <                    Node<V>[] t; int i, n;      // must use locals in checks
2288 <                    if (baseIndex >= baseLimit || (t = tab) == null ||
2289 <                        (n = t.length) <= (i = index) || i < 0)
2290 <                        return nextVal = null;
2291 <                    if ((e = next = tabAt(t, index)) != null && e.hash < 0) {
2292 <                        Object ek;
2293 <                        if ((ek = e.key) instanceof TreeBin)
2294 <                            e = ((TreeBin<V>)ek).first;
2295 <                        else {
2296 <                            tab = (Node<V>[])ek;
2297 <                            continue;           // restarts due to null val
2298 <                        }
2299 <                    }
2300 <                    if ((index += baseSize) >= n)
2301 <                        index = ++baseIndex;    // visit upper slots if present
2302 <                }
2303 <                nextKey = (K)e.key;
2304 <                if ((nextVal = e.val) != null) // skip deleted or special nodes
2305 <                    return nextVal;
2306 <            }
2251 >        public final boolean hasNext() { return next != null; }
2252 >        public final boolean hasMoreElements() { return next != null; }
2253 >
2254 >        public final void remove() {
2255 >            Node<K,V> p;
2256 >            if ((p = lastReturned) == null)
2257 >                throw new IllegalStateException();
2258 >            lastReturned = null;
2259 >            map.internalReplace((K)p.key, null, null);
2260          }
2261 +    }
2262  
2263 <        /**
2264 <         * Common case version for value traversal
2265 <         */
2266 <        @SuppressWarnings("unchecked") final V advanceValue() {
2267 <            outer: for (Node<V> e = next;;) {
2314 <                if (e == null || (e = e.next) == null) {
2315 <                    Node<V>[] t; int i, len, n; Object ek;
2316 <                    if ((t = tab) == null ||
2317 <                        baseSize != (len = t.length) ||
2318 <                        len < (n = baseLimit) ||
2319 <                        baseIndex != (i = index))
2320 <                        break;
2321 <                    do {
2322 <                        if (i < 0 || i >= n) {
2323 <                            index = baseIndex = n;
2324 <                            next = null;
2325 <                            return nextVal = null;
2326 <                        }
2327 <                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
2328 <                            if ((ek = e.key) instanceof TreeBin)
2329 <                                e = ((TreeBin<V>)ek).first;
2330 <                            else {
2331 <                                index = baseIndex = i;
2332 <                                next = null;
2333 <                                tab = (Node<V>[])ek;
2334 <                                break outer;
2335 <                            }
2336 <                        }
2337 <                        ++i;
2338 <                    } while (e == null);
2339 <                    index = baseIndex = i;
2340 <                }
2341 <                V v;
2342 <                K k = (K)e.key;
2343 <                if ((v = e.val) != null) {
2344 <                    nextVal = v;
2345 <                    nextKey = k;
2346 <                    next = e;
2347 <                    return v;
2348 <                }
2349 <            }
2350 <            return advance();
2263 >    static final class KeyIterator<K,V> extends BaseIterator<K,V>
2264 >        implements Iterator<K>, Enumeration<K> {
2265 >        KeyIterator(Node<K,V>[] tab, int index, int size, int limit,
2266 >                    ConcurrentHashMap<K,V> map) {
2267 >            super(tab, index, size, limit, map);
2268          }
2269  
2270 <        /**
2271 <         * Common case version for key traversal
2272 <         */
2273 <        @SuppressWarnings("unchecked") final K advanceKey() {
2274 <            outer: for (Node<V> e = next;;) {
2275 <                if (e == null || (e = e.next) == null) {
2276 <                    Node<V>[] t; int i, len, n; Object ek;
2277 <                    if ((t = tab) == null ||
2278 <                        baseSize != (len = t.length) ||
2279 <                        len < (n = baseLimit) ||
2280 <                        baseIndex != (i = index))
2281 <                        break;
2282 <                    do {
2283 <                        if (i < 0 || i >= n) {
2284 <                            index = baseIndex = n;
2285 <                            next = null;
2286 <                            nextVal = null;
2287 <                            return null;
2288 <                        }
2289 <                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
2290 <                            if ((ek = e.key) instanceof TreeBin)
2291 <                                e = ((TreeBin<V>)ek).first;
2292 <                            else {
2293 <                                index = baseIndex = i;
2294 <                                next = null;
2295 <                                tab = (Node<V>[])ek;
2296 <                                break outer;
2297 <                            }
2298 <                        }
2299 <                        ++i;
2300 <                    } while (e == null);
2301 <                    index = baseIndex = i;
2302 <                }
2303 <                V v;
2304 <                K k = (K)e.key;
2305 <                if ((v = e.val) != null) {
2306 <                    nextVal = v;
2307 <                    nextKey = k;
2308 <                    next = e;
2309 <                    return k;
2310 <                }
2311 <            }
2312 <            return (advance() == null) ? null : nextKey;
2270 >        public final K next() {
2271 >            Node<K,V> p;
2272 >            if ((p = next) == null)
2273 >                throw new NoSuchElementException();
2274 >            K k = (K)p.key;
2275 >            lastReturned = p;
2276 >            advance();
2277 >            return k;
2278 >        }
2279 >
2280 >        public final K nextElement() { return next(); }
2281 >    }
2282 >
2283 >    static final class ValueIterator<K,V> extends BaseIterator<K,V>
2284 >        implements Iterator<V>, Enumeration<V> {
2285 >        ValueIterator(Node<K,V>[] tab, int index, int size, int limit,
2286 >                      ConcurrentHashMap<K,V> map) {
2287 >            super(tab, index, size, limit, map);
2288 >        }
2289 >
2290 >        public final V next() {
2291 >            Node<K,V> p;
2292 >            if ((p = next) == null)
2293 >                throw new NoSuchElementException();
2294 >            V v = p.val;
2295 >            lastReturned = p;
2296 >            advance();
2297 >            return v;
2298 >        }
2299 >
2300 >        public final V nextElement() { return next(); }
2301 >    }
2302 >
2303 >    static final class EntryIterator<K,V> extends BaseIterator<K,V>
2304 >        implements Iterator<Map.Entry<K,V>> {
2305 >        EntryIterator(Node<K,V>[] tab, int index, int size, int limit,
2306 >                      ConcurrentHashMap<K,V> map) {
2307 >            super(tab, index, size, limit, map);
2308 >        }
2309 >
2310 >        public final Map.Entry<K,V> next() {
2311 >            Node<K,V> p;
2312 >            if ((p = next) == null)
2313 >                throw new NoSuchElementException();
2314 >            K k = (K)p.key;
2315 >            V v = p.val;
2316 >            lastReturned = p;
2317 >            advance();
2318 >            return new MapEntry<K,V>(k, v, map);
2319 >        }
2320 >    }
2321 >
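Because BaseIterator.remove forwards to internalReplace(key, null, null), removing through any of these iterators removes the mapping from the map itself, for example:

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class IteratorRemoveDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("keep", 1);
            map.put("drop", -1);

            for (Iterator<Map.Entry<String, Integer>> it = map.entrySet().iterator();
                 it.hasNext(); ) {
                if (it.next().getValue() < 0)
                    it.remove();                 // removes the mapping from the map
            }
            System.out.println(map);             // {keep=1}
        }
    }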
2322 >    static final class KeySpliterator<K,V> extends Traverser<K,V>
2323 >        implements Spliterator<K> {
2324 >        long est;               // size estimate
2325 >        KeySpliterator(Node<K,V>[] tab, int size, int index, int limit,
2326 >                       long est) {
2327 >            super(tab, size, index, limit);
2328 >            this.est = est;
2329 >        }
2330 >
2331 >        public Spliterator<K> trySplit() {
2332 >            int i, f, h;
2333 >            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
2334 >                new KeySpliterator<K,V>(tab, baseSize, baseLimit = h,
2335 >                                        f, est >>>= 1);
2336          }
2337  
2338 <        @SuppressWarnings("unchecked") final void forEachValue(Consumer<? super V> action) {
2338 >        public void forEachRemaining(Consumer<? super K> action) {
2339              if (action == null) throw new NullPointerException();
2340 <            Node<V>[] t; int i, len, n;
2341 <            if ((t = tab) != null && baseSize == (len = t.length) &&
2402 <                len >= (n = baseLimit) && baseIndex == (i = index)) {
2403 <                index = baseIndex = n;
2404 <                nextVal = null;
2405 <                Node<V> e = next;
2406 <                next = null;
2407 <                if (e != null)
2408 <                    e = e.next;
2409 <                outer: for (;; e = e.next) {
2410 <                    V v; Object ek;
2411 <                    for (; e == null; ++i) {
2412 <                        if (i < 0 || i >= n)
2413 <                            return;
2414 <                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
2415 <                            if ((ek = e.key) instanceof TreeBin)
2416 <                                e = ((TreeBin<V>)ek).first;
2417 <                            else {
2418 <                                index = baseIndex = i;
2419 <                                tab = (Node<V>[])ek;
2420 <                                break outer;
2421 <                            }
2422 <                        }
2423 <                    }
2424 <                    if ((v = e.val) != null)
2425 <                        action.accept(v);
2426 <                }
2427 <            }
2428 <            V v;
2429 <            while ((v = advance()) != null)
2430 <                action.accept(v);
2340 >            for (Node<K,V> p; (p = advance()) != null;)
2341 >                action.accept((K)p.key);
2342          }
2343  
2344 <        @SuppressWarnings("unchecked") final void forEachKey(Consumer<? super K> action) {
2344 >        public boolean tryAdvance(Consumer<? super K> action) {
2345              if (action == null) throw new NullPointerException();
2346 <            Node<V>[] t; int i, len, n;
2347 <            if ((t = tab) != null && baseSize == (len = t.length) &&
2348 <                len >= (n = baseLimit) && baseIndex == (i = index)) {
2349 <                index = baseIndex = n;
2350 <                nextVal = null;
2440 <                Node<V> e = next;
2441 <                next = null;
2442 <                if (e != null)
2443 <                    e = e.next;
2444 <                outer: for (;; e = e.next) {
2445 <                    for (; e == null; ++i) {
2446 <                        if (i < 0 || i >= n)
2447 <                            return;
2448 <                        if ((e = tabAt(t, i)) != null && e.hash < 0) {
2449 <                            Object ek;
2450 <                            if ((ek = e.key) instanceof TreeBin)
2451 <                                e = ((TreeBin<V>)ek).first;
2452 <                            else {
2453 <                                index = baseIndex = i;
2454 <                                tab = (Node<V>[])ek;
2455 <                                break outer;
2456 <                            }
2457 <                        }
2458 <                    }
2459 <                    Object k = e.key;
2460 <                    if (e.val != null)
2461 <                        action.accept((K)k);
2462 <                }
2463 <            }
2464 <            while (advance() != null)
2465 <                action.accept(nextKey);
2346 >            Node<K,V> p;
2347 >            if ((p = advance()) == null)
2348 >                return false;
2349 >            action.accept((K)p.key);
2350 >            return true;
2351          }
2352  
2353 <        public final void remove() {
2354 <            K k = nextKey;
2355 <            if (k == null && (advanceValue() == null || (k = nextKey) == null))
2356 <                throw new IllegalStateException();
2357 <            map.internalReplace(k, null, null);
2353 >        public long estimateSize() { return est; }
2354 >
2355 >        public int characteristics() {
2356 >            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
2357 >                Spliterator.NONNULL;
2358          }
2359 +    }
2360  
2361 <        public final boolean hasNext() {
2362 <            return nextVal != null || advanceValue() != null;
2361 >    static final class ValueSpliterator<K,V> extends Traverser<K,V>
2362 >        implements Spliterator<V> {
2363 >        long est;               // size estimate
2364 >        ValueSpliterator(Node<K,V>[] tab, int size, int index, int limit,
2365 >                         long est) {
2366 >            super(tab, size, index, limit);
2367 >            this.est = est;
2368 >        }
2369 >
2370 >        public Spliterator<V> trySplit() {
2371 >            int i, f, h;
2372 >            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
2373 >                new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
2374 >                                          f, est >>>= 1);
2375          }
2376  
2377 <        public final boolean hasMoreElements() { return hasNext(); }
2377 >        public void forEachRemaining(Consumer<? super V> action) {
2378 >            if (action == null) throw new NullPointerException();
2379 >            for (Node<K,V> p; (p = advance()) != null;)
2380 >                action.accept(p.val);
2381 >        }
2382  
2383 <        public void compute() { } // default no-op CountedCompleter body
2383 >        public boolean tryAdvance(Consumer<? super V> action) {
2384 >            if (action == null) throw new NullPointerException();
2385 >            Node<K,V> p;
2386 >            if ((p = advance()) == null)
2387 >                return false;
2388 >            action.accept(p.val);
2389 >            return true;
2390 >        }
2391  
2392 <        public long estimateSize() { return batch; }
2392 >        public long estimateSize() { return est; }
2393  
2394 <        /**
2395 <         * Returns a batch value > 0 if this task should (and must) be
2487 <         * split, if so, adding to pending count, and in any case
2488 <         * updating batch value. The initial batch value is approx
2489 <         * exp2 of the number of times (minus one) to split task by
2490 <         * two before executing leaf action. This value is faster to
2491 <         * compute and more convenient to use as a guide to splitting
2492 <         * than is the depth, since it is used while dividing by two
2493 <         * anyway.
2494 <         */
2495 <        final int preSplit() {
2496 <            int b; ForkJoinPool pool;
2497 <            if ((b = batch) < 0) { // force initialization
2498 <                int sp = (((pool = getPool()) == null) ?
2499 <                          ForkJoinPool.getCommonPoolParallelism() :
2500 <                          pool.getParallelism()) << 2; // slack of 4
2501 <                long n = map.sumCount();
2502 <                b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2503 <            }
2504 <            b = (b <= 1 || baseIndex >= baseLimit) ? 0 : (b >>> 1);
2505 <            if ((batch = b) > 0)
2506 <                addToPendingCount(1);
2507 <            return b;
2394 >        public int characteristics() {
2395 >            return Spliterator.CONCURRENT | Spliterator.NONNULL;
2396          }
2397      }
2398  
2399 +    static final class EntrySpliterator<K,V> extends Traverser<K,V>
2400 +        implements Spliterator<Map.Entry<K,V>> {
2401 +        final ConcurrentHashMap<K,V> map; // To export MapEntry
2402 +        long est;               // size estimate
2403 +        EntrySpliterator(Node<K,V>[] tab, int size, int index, int limit,
2404 +                         long est, ConcurrentHashMap<K,V> map) {
2405 +            super(tab, size, index, limit);
2406 +            this.map = map;
2407 +            this.est = est;
2408 +        }
2409 +
2410 +        public Spliterator<Map.Entry<K,V>> trySplit() {
2411 +            int i, f, h;
2412 +            return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
2413 +                new EntrySpliterator<K,V>(tab, baseSize, baseLimit = h,
2414 +                                          f, est >>>= 1, map);
2415 +        }
2416 +
2417 +        public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
2418 +            if (action == null) throw new NullPointerException();
2419 +            for (Node<K,V> p; (p = advance()) != null; )
2420 +                action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
2421 +        }
2422 +
2423 +        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> action) {
2424 +            if (action == null) throw new NullPointerException();
2425 +            Node<K,V> p;
2426 +            if ((p = advance()) == null)
2427 +                return false;
2428 +            action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
2429 +            return true;
2430 +        }
2431 +
2432 +        public long estimateSize() { return est; }
2433 +
2434 +        public int characteristics() {
2435 +            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
2436 +                Spliterator.NONNULL;
2437 +        }
2438 +    }
2439 +
2440 +
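These spliterators split by halving the remaining bin range (and the size estimate) and report CONCURRENT and NONNULL, plus DISTINCT for keys and entries. Seen through the public collection views, that looks like the following (demo class name is illustrative):

    import java.util.Spliterator;
    import java.util.concurrent.ConcurrentHashMap;

    public class SpliteratorDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            for (char c = 'a'; c <= 'h'; c++)
                map.put(String.valueOf(c), (int) c);

            Spliterator<String> s1 = map.keySet().spliterator();
            System.out.println(s1.hasCharacteristics(Spliterator.CONCURRENT));  // true
            System.out.println(s1.estimateSize());         // an estimate, about 8 here

            Spliterator<String> s2 = s1.trySplit();         // may be null if too small to split
            long[] counts = new long[2];
            s1.forEachRemaining(k -> counts[0]++);
            if (s2 != null)
                s2.forEachRemaining(k -> counts[1]++);
            System.out.println(counts[0] + counts[1]);      // 8: the two parts cover every key once
        }
    }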
2441      /* ---------------- Public operations -------------- */
2442  
2443      /**
# Line 2583 | Line 2513 | public class ConcurrentHashMap<K,V>
2513       * nonpositive
2514       */
2515      public ConcurrentHashMap(int initialCapacity,
2516 <                               float loadFactor, int concurrencyLevel) {
2516 >                             float loadFactor, int concurrencyLevel) {
2517          if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2518              throw new IllegalArgumentException();
2519          if (initialCapacity < concurrencyLevel)   // Use at least as many bins
# Line 2707 | Line 2637 | public class ConcurrentHashMap<K,V>
2637      public boolean containsValue(Object value) {
2638          if (value == null)
2639              throw new NullPointerException();
2640 <        V v;
2641 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2642 <        while ((v = it.advanceValue()) != null) {
2643 <            if (v == value || value.equals(v))
2644 <                return true;
2640 >        Node<K,V>[] t;
2641 >        if ((t = table) != null) {
2642 >            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
2643 >            for (Node<K,V> p; (p = it.advance()) != null; ) {
2644 >                V v;
2645 >                if ((v = p.val) == value || value.equals(v))
2646 >                    return true;
2647 >            }
2648          }
2649          return false;
2650      }
# Line 2797 | Line 2730 | public class ConcurrentHashMap<K,V>
2730       * @throws RuntimeException or Error if the mappingFunction does so,
2731       *         in which case the mapping is left unestablished
2732       */
2733 <    public V computeIfAbsent
2801 <        (K key, Function<? super K, ? extends V> mappingFunction) {
2733 >    public V computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) {
2734          return internalComputeIfAbsent(key, mappingFunction);
2735      }
2736  
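A typical use of computeIfAbsent is an atomic get-or-create, e.g. a small memo cache where the mapping function runs only when the key is absent:

    import java.util.concurrent.ConcurrentHashMap;

    public class ComputeIfAbsentDemo {
        private static final ConcurrentHashMap<Integer, Long> SQUARES = new ConcurrentHashMap<>();

        static long square(int n) {
            // The lambda is invoked only if n has no mapping yet.
            return SQUARES.computeIfAbsent(n, k -> (long) k * k);
        }

        public static void main(String[] args) {
            System.out.println(square(12));  // computes and caches 144
            System.out.println(square(12));  // served from the map
        }
    }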
# Line 2822 | Line 2754 | public class ConcurrentHashMap<K,V>
2754       * @throws RuntimeException or Error if the remappingFunction does so,
2755       *         in which case the mapping is unchanged
2756       */
2757 <    public V computeIfPresent
2826 <        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2757 >    public V computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2758          return internalCompute(key, true, remappingFunction);
2759      }
2760  
# Line 2847 | Line 2778 | public class ConcurrentHashMap<K,V>
2778       * @throws RuntimeException or Error if the remappingFunction does so,
2779       *         in which case the mapping is unchanged
2780       */
2781 <    public V compute
2851 <        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2781 >    public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2782          return internalCompute(key, false, remappingFunction);
2783      }
2784  
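compute applies the remapping function whether or not a mapping is present, and a null result removes the mapping, which makes it convenient for counters that should disappear once they reach zero:

    import java.util.concurrent.ConcurrentHashMap;

    public class ComputeDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> stock = new ConcurrentHashMap<>();
            stock.put("widgets", 3);

            // Decrement, removing the entry once it would reach zero.
            for (int i = 0; i < 4; i++)
                stock.compute("widgets", (k, v) -> (v == null || v <= 1) ? null : v - 1);

            System.out.println(stock.containsKey("widgets"));  // false
        }
    }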
# Line 2872 | Line 2802 | public class ConcurrentHashMap<K,V>
2802       * @throws RuntimeException or Error if the remappingFunction does so,
2803       *         in which case the mapping is unchanged
2804       */
2805 <    public V merge
2876 <        (K key, V value,
2877 <         BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2805 >    public V merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2806          return internalMerge(key, value, remappingFunction);
2807      }
2808  
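merge installs the given value when the key is absent and otherwise combines the old and new values with the remapping function (a null result again removes the entry); a concurrent word count is the classic example:

    import java.util.concurrent.ConcurrentHashMap;

    public class MergeDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
            String[] words = { "to", "be", "or", "not", "to", "be" };

            for (String w : words)
                counts.merge(w, 1, Integer::sum);   // insert 1, or add 1 to the old count

            System.out.println(counts.get("to"));   // 2
        }
    }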
# Line 2936 | Line 2864 | public class ConcurrentHashMap<K,V>
2864      /**
2865       * Returns a {@link Set} view of the keys contained in this map.
2866       * The set is backed by the map, so changes to the map are
2867 <     * reflected in the set, and vice-versa.
2867 >     * reflected in the set, and vice-versa. The set supports element
2868 >     * removal, which removes the corresponding mapping from this map,
2869 >     * via the <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
2870 >     * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt>
2871 >     * operations.  It does not support the <tt>add</tt> or
2872 >     * <tt>addAll</tt> operations.
2873 >     *
2874 >     * <p>The view's <tt>iterator</tt> is a "weakly consistent" iterator
2875 >     * that will never throw {@link ConcurrentModificationException},
2876 >     * and guarantees to traverse elements as they existed upon
2877 >     * construction of the iterator, and may (but is not guaranteed to)
2878 >     * reflect any modifications subsequent to construction.
2879       *
2880       * @return the set view
2881       */
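Concretely, the view semantics spelled out in the expanded javadoc above mean that removal through the key set removes the mapping, and map updates show up in the set:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    public class KeySetViewDemo {
        public static void main(String[] args) {
            ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
            map.put("a", 1);
            map.put("b", 2);

            Set<String> keys = map.keySet();
            keys.remove("a");                          // removal through the view...
            System.out.println(map.containsKey("a"));  // ...removes the mapping: false

            map.put("c", 3);                           // map changes are visible in the view
            System.out.println(keys.contains("c"));    // true
        }
    }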
# Line 2965 | Line 2904 | public class ConcurrentHashMap<K,V>
2904      /**
2905       * Returns a {@link Collection} view of the values contained in this map.
2906       * The collection is backed by the map, so changes to the map are
2907 <     * reflected in the collection, and vice-versa.
2907 >     * reflected in the collection, and vice-versa.  The collection
2908 >     * supports element removal, which removes the corresponding
2909 >     * mapping from this map, via the <tt>Iterator.remove</tt>,
2910 >     * <tt>Collection.remove</tt>, <tt>removeAll</tt>,
2911 >     * <tt>retainAll</tt>, and <tt>clear</tt> operations.  It does not
2912 >     * support the <tt>add</tt> or <tt>addAll</tt> operations.
2913 >     *
2914 >     * <p>The view's <tt>iterator</tt> is a "weakly consistent" iterator
2915 >     * that will never throw {@link ConcurrentModificationException},
2916 >     * and guarantees to traverse elements as they existed upon
2917 >     * construction of the iterator, and may (but is not guaranteed to)
2918 >     * reflect any modifications subsequent to construction.
2919       *
2920       * @return the collection view
2921       */
2922 <    public ValuesView<K,V> values() {
2922 >    public Collection<V> values() {
2923          ValuesView<K,V> vs = values;
2924          return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2925      }
# Line 2981 | Line 2931 | public class ConcurrentHashMap<K,V>
2931       * removal, which removes the corresponding mapping from the map,
2932       * via the {@code Iterator.remove}, {@code Set.remove},
2933       * {@code removeAll}, {@code retainAll}, and {@code clear}
2934 <     * operations.  It does not support the {@code add} or
2985 <     * {@code addAll} operations.
2934 >     * operations.
2935       *
2936       * <p>The view's {@code iterator} is a "weakly consistent" iterator
2937       * that will never throw {@link ConcurrentModificationException},
# Line 3004 | Line 2953 | public class ConcurrentHashMap<K,V>
2953       * @see #keySet()
2954       */
2955      public Enumeration<K> keys() {
2956 <        return new KeyIterator<K,V>(this);
2956 >        Node<K,V>[] t;
2957 >        int f = (t = table) == null ? 0 : t.length;
2958 >        return new KeyIterator<K,V>(t, f, 0, f, this);
2959      }
2960  
2961      /**
# Line 3014 | Line 2965 | public class ConcurrentHashMap<K,V>
2965       * @see #values()
2966       */
2967      public Enumeration<V> elements() {
2968 <        return new ValueIterator<K,V>(this);
2968 >        Node<K,V>[] t;
2969 >        int f = (t = table) == null ? 0 : t.length;
2970 >        return new ValueIterator<K,V>(t, f, 0, f, this);
2971      }
2972  
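The legacy enumeration views behave the same way; a sketch, assuming the usual java.util imports:

    ConcurrentHashMap<String, Integer> m = new ConcurrentHashMap<>();
    m.put("x", 1);
    for (Enumeration<String> e = m.keys(); e.hasMoreElements(); )
        System.out.println(e.nextElement());        // Hashtable-style traversal
    List<Integer> valueSnapshot = Collections.list(m.elements());
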
2973      /**
# Line 3026 | Line 2979 | public class ConcurrentHashMap<K,V>
2979       */
2980      public int hashCode() {
2981          int h = 0;
2982 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2983 <        V v;
2984 <        while ((v = it.advanceValue()) != null) {
2985 <            h += it.nextKey.hashCode() ^ v.hashCode();
2982 >        Node<K,V>[] t;
2983 >        if ((t = table) != null) {
2984 >            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
2985 >            for (Node<K,V> p; (p = it.advance()) != null; )
2986 >                h += p.key.hashCode() ^ p.val.hashCode();
2987          }
2988          return h;
2989      }
# Line 3046 | Line 3000 | public class ConcurrentHashMap<K,V>
3000       * @return a string representation of this map
3001       */
3002      public String toString() {
3003 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3003 >        Node<K,V>[] t;
3004 >        int f = (t = table) == null ? 0 : t.length;
3005 >        Traverser<K,V> it = new Traverser<K,V>(t, f, 0, f);
3006          StringBuilder sb = new StringBuilder();
3007          sb.append('{');
3008 <        V v;
3009 <        if ((v = it.advanceValue()) != null) {
3008 >        Node<K,V> p;
3009 >        if ((p = it.advance()) != null) {
3010              for (;;) {
3011 <                K k = it.nextKey;
3011 >                K k = (K)p.key;
3012 >                V v = p.val;
3013                  sb.append(k == this ? "(this Map)" : k);
3014                  sb.append('=');
3015                  sb.append(v == this ? "(this Map)" : v);
3016 <                if ((v = it.advanceValue()) == null)
3016 >                if ((p = it.advance()) == null)
3017                      break;
3018                  sb.append(',').append(' ');
3019              }
# Line 3079 | Line 3036 | public class ConcurrentHashMap<K,V>
3036              if (!(o instanceof Map))
3037                  return false;
3038              Map<?,?> m = (Map<?,?>) o;
3039 <            Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3040 <            V val;
3041 <            while ((val = it.advanceValue()) != null) {
3042 <                Object v = m.get(it.nextKey);
3039 >            Node<K,V>[] t;
3040 >            int f = (t = table) == null ? 0 : t.length;
3041 >            Traverser<K,V> it = new Traverser<K,V>(t, f, 0, f);
3042 >            for (Node<K,V> p; (p = it.advance()) != null; ) {
3043 >                V val = p.val;
3044 >                Object v = m.get(p.key);
3045                  if (v == null || (v != val && !v.equals(val)))
3046                      return false;
3047              }
# Line 3098 | Line 3057 | public class ConcurrentHashMap<K,V>
3057          return true;
3058      }
3059  
3101    /* ----------------Iterators -------------- */
3102
3103    @SuppressWarnings("serial") static final class KeyIterator<K,V>
3104        extends Traverser<K,V,Object>
3105        implements Spliterator<K>, Iterator<K>, Enumeration<K> {
3106        KeyIterator(ConcurrentHashMap<K,V> map) { super(map); }
3107        KeyIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3108            super(map, it);
3109        }
3110        public Spliterator<K> trySplit() {
3111            return (baseLimit - baseIndex <= 1) ? null :
3112                new KeyIterator<K,V>(map, this);
3113        }
3114        public final K next() {
3115            K k;
3116            if ((k = (nextVal == null) ? advanceKey() : nextKey) == null)
3117                throw new NoSuchElementException();
3118            nextVal = null;
3119            return k;
3120        }
3121
3122        public final K nextElement() { return next(); }
3123
3124        public Iterator<K> iterator() { return this; }
3125
3126        public void forEachRemaining(Consumer<? super K> action) {
3127            forEachKey(action);
3128        }
3129
3130        public boolean tryAdvance(Consumer<? super K> block) {
3131            if (block == null) throw new NullPointerException();
3132            K k;
3133            if ((k = advanceKey()) == null)
3134                return false;
3135            block.accept(k);
3136            return true;
3137        }
3138
3139        public int characteristics() {
3140            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
3141                Spliterator.NONNULL;
3142        }
3143
3144    }
3145
3146    @SuppressWarnings("serial") static final class ValueIterator<K,V>
3147        extends Traverser<K,V,Object>
3148        implements Spliterator<V>, Iterator<V>, Enumeration<V> {
3149        ValueIterator(ConcurrentHashMap<K,V> map) { super(map); }
3150        ValueIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3151            super(map, it);
3152        }
3153        public Spliterator<V> trySplit() {
3154            return (baseLimit - baseIndex <= 1) ? null :
3155                new ValueIterator<K,V>(map, this);
3156        }
3157
3158        public final V next() {
3159            V v;
3160            if ((v = nextVal) == null && (v = advanceValue()) == null)
3161                throw new NoSuchElementException();
3162            nextVal = null;
3163            return v;
3164        }
3165
3166        public final V nextElement() { return next(); }
3167
3168        public Iterator<V> iterator() { return this; }
3169
3170        public void forEachRemaining(Consumer<? super V> action) {
3171            forEachValue(action);
3172        }
3173
3174        public boolean tryAdvance(Consumer<? super V> block) {
3175            V v;
3176            if (block == null) throw new NullPointerException();
3177            if ((v = advanceValue()) == null)
3178                return false;
3179            block.accept(v);
3180            return true;
3181        }
3182
3183        public int characteristics() {
3184            return Spliterator.CONCURRENT | Spliterator.NONNULL;
3185        }
3186    }
3187
3188    @SuppressWarnings("serial") static final class EntryIterator<K,V>
3189        extends Traverser<K,V,Object>
3190        implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
3191        EntryIterator(ConcurrentHashMap<K,V> map) { super(map); }
3192        EntryIterator(ConcurrentHashMap<K,V> map, Traverser<K,V,Object> it) {
3193            super(map, it);
3194        }
3195        public Spliterator<Map.Entry<K,V>> trySplit() {
3196            return (baseLimit - baseIndex <= 1) ? null :
3197                new EntryIterator<K,V>(map, this);
3198        }
3199
3200        public final Map.Entry<K,V> next() {
3201            V v;
3202            if ((v = nextVal) == null && (v = advanceValue()) == null)
3203                throw new NoSuchElementException();
3204            K k = nextKey;
3205            nextVal = null;
3206            return new MapEntry<K,V>(k, v, map);
3207        }
3208
3209        public Iterator<Map.Entry<K,V>> iterator() { return this; }
3210
3211        public void forEachRemaining(Consumer<? super Map.Entry<K,V>> action) {
3212            if (action == null) throw new NullPointerException();
3213            V v;
3214            while ((v = advanceValue()) != null)
3215                action.accept(entryFor(nextKey, v));
3216        }
3217
3218        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> block) {
3219            V v;
3220            if (block == null) throw new NullPointerException();
3221            if ((v = advanceValue()) == null)
3222                return false;
3223            block.accept(entryFor(nextKey, v));
3224            return true;
3225        }
3226
3227        public int characteristics() {
3228            return Spliterator.DISTINCT | Spliterator.CONCURRENT |
3229                Spliterator.NONNULL;
3230        }
3231    }
3232
3233    /**
3234     * Exported Entry for iterators
3235     */
3236    static final class MapEntry<K,V> implements Map.Entry<K,V> {
3237        final K key; // non-null
3238        V val;       // non-null
3239        final ConcurrentHashMap<K,V> map;
3240        MapEntry(K key, V val, ConcurrentHashMap<K,V> map) {
3241            this.key = key;
3242            this.val = val;
3243            this.map = map;
3244        }
3245        public final K getKey()       { return key; }
3246        public final V getValue()     { return val; }
3247        public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
3248        public final String toString(){ return key + "=" + val; }
3249
3250        public final boolean equals(Object o) {
3251            Object k, v; Map.Entry<?,?> e;
3252            return ((o instanceof Map.Entry) &&
3253                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3254                    (v = e.getValue()) != null &&
3255                    (k == key || k.equals(key)) &&
3256                    (v == val || v.equals(val)));
3257        }
3258
3259        /**
3260         * Sets our entry's value and writes through to the map. The
3261         * value to return is somewhat arbitrary here. Since we do not
3262         * necessarily track asynchronous changes, the most recent
3263         * "previous" value could be different from what we return (or
3264         * could even have been removed in which case the put will
3265         * re-establish). We do not and cannot guarantee more.
3266         */
3267        public final V setValue(V value) {
3268            if (value == null) throw new NullPointerException();
3269            V v = val;
3270            val = value;
3271            map.put(key, value);
3272            return v;
3273        }
3274    }
3275
3276    /**
3277     * Returns exportable snapshot entry for the given key and value
3278     * when write-through can't or shouldn't be used.
3279     */
3280    static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3281        return new AbstractMap.SimpleEntry<K,V>(k, v);
3282    }
3283
3060      /* ---------------- Serialization Support -------------- */
3061  
3062      /**
# Line 3302 | Line 3078 | public class ConcurrentHashMap<K,V>
3078       * for each key-value mapping, followed by a null pair.
3079       * The key-value mappings are emitted in no particular order.
3080       */
3081 <    @SuppressWarnings("unchecked") private void writeObject
3306 <        (java.io.ObjectOutputStream s)
3081 >    private void writeObject(java.io.ObjectOutputStream s)
3082          throws java.io.IOException {
3083          // For serialization compatibility
3084          // Emulate segment calculation from previous version of this class
# Line 3322 | Line 3097 | public class ConcurrentHashMap<K,V>
3097          s.putFields().put("segments", segments);
3098          s.putFields().put("segmentShift", segmentShift);
3099          s.putFields().put("segmentMask", segmentMask);
3325
3100          s.writeFields();
3101 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3102 <        V v;
3103 <        while ((v = it.advanceValue()) != null) {
3104 <            s.writeObject(it.nextKey);
3105 <            s.writeObject(v);
3101 >
3102 >        Node<K,V>[] t;
3103 >        if ((t = table) != null) {
3104 >            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
3105 >            for (Node<K,V> p; (p = it.advance()) != null; ) {
3106 >                s.writeObject(p.key);
3107 >                s.writeObject(p.val);
3108 >            }
3109          }
3110          s.writeObject(null);
3111          s.writeObject(null);
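
Because the stream layout above is just the emulated segment fields followed by the key/value pairs and a terminating null pair, a map round-trips through ordinary Java serialization. A sketch (names and values hypothetical; java.io imports assumed):

    static ConcurrentHashMap<String, Integer> roundTrip(
            ConcurrentHashMap<String, Integer> m)
            throws IOException, ClassNotFoundException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(m);                  // key/value pairs, then null,null
        }
        try (ObjectInputStream in = new ObjectInputStream(
                 new ByteArrayInputStream(bytes.toByteArray()))) {
            @SuppressWarnings("unchecked")
            ConcurrentHashMap<String, Integer> copy =
                (ConcurrentHashMap<String, Integer>) in.readObject();
            return copy;
        }
    }
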
# Line 3339 | Line 3116 | public class ConcurrentHashMap<K,V>
3116       * Reconstitutes the instance from a stream (that is, deserializes it).
3117       * @param s the stream
3118       */
3119 <    @SuppressWarnings("unchecked") private void readObject
3343 <        (java.io.ObjectInputStream s)
3119 >    private void readObject(java.io.ObjectInputStream s)
3120          throws java.io.IOException, ClassNotFoundException {
3121          s.defaultReadObject();
3122  
3123          // Create all nodes, then place in table once size is known
3124          long size = 0L;
3125 <        Node<V> p = null;
3125 >        Node<K,V> p = null;
3126          for (;;) {
3127              K k = (K) s.readObject();
3128              V v = (V) s.readObject();
3129              if (k != null && v != null) {
3130                  int h = spread(k.hashCode());
3131 <                p = new Node<V>(h, k, v, p);
3131 >                p = new Node<K,V>(h, k, v, p);
3132                  ++size;
3133              }
3134              else
# Line 3374 | Line 3150 | public class ConcurrentHashMap<K,V>
3150                  try {
3151                      if (table == null) {
3152                          init = true;
3153 <                        @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3378 <                        Node<V>[] tab = (Node<V>[])rt;
3153 >                        Node<K,V>[] tab = (Node<K,V>[])new Node[n];
3154                          int mask = n - 1;
3155                          while (p != null) {
3156                              int j = p.hash & mask;
3157 <                            Node<V> next = p.next;
3158 <                            Node<V> q = p.next = tabAt(tab, j);
3157 >                            Node<K,V> next = p.next;
3158 >                            Node<K,V> q = p.next = tabAt(tab, j);
3159                              setTabAt(tab, j, p);
3160                              if (!collide && q != null && q.hash == p.hash)
3161                                  collide = true;
# Line 3394 | Line 3169 | public class ConcurrentHashMap<K,V>
3169                      sizeCtl = sc;
3170                  }
3171                  if (collide) { // rescan and convert to TreeBins
3172 <                    Node<V>[] tab = table;
3172 >                    Node<K,V>[] tab = table;
3173                      for (int i = 0; i < tab.length; ++i) {
3174                          int c = 0;
3175 <                        for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3175 >                        for (Node<K,V> e = tabAt(tab, i); e != null; e = e.next) {
3176                              if (++c > TREE_THRESHOLD &&
3177                                  (e.key instanceof Comparable)) {
3178                                  replaceWithTreeBin(tab, i, e.key);
# Line 3418 | Line 3193 | public class ConcurrentHashMap<K,V>
3193  
3194      // -------------------------------------------------------
3195  
3196 <    // Sequential bulk operations
3196 >    // Overrides of other default Map methods
3197 >
3198 >    public void forEach(BiConsumer<? super K, ? super V> action) {
3199 >        if (action == null) throw new NullPointerException();
3200 >        Node<K,V>[] t;
3201 >        if ((t = table) != null) {
3202 >            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
3203 >            for (Node<K,V> p; (p = it.advance()) != null; ) {
3204 >                action.accept((K)p.key, p.val);
3205 >            }
3206 >        }
3207 >    }
3208 >
3209 >    public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
3210 >        if (function == null) throw new NullPointerException();
3211 >        Node<K,V>[] t;
3212 >        if ((t = table) != null) {
3213 >            Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
3214 >            for (Node<K,V> p; (p = it.advance()) != null; ) {
3215 >                K k = (K)p.key;
3216 >                internalPut(k, function.apply(k, p.val), false);
3217 >            }
3218 >        }
3219 >    }
3220 >
3221 >    // -------------------------------------------------------
3222 >
3223 >    // Parallel bulk operations
3224 >
3225 >    /**
3226 >     * Computes initial batch value for bulk tasks. The returned value
3227 >     * is approximately exp2 of the number of times (minus one) to
3228 >     * split task by two before executing leaf action. This value is
3229 >     * faster to compute and more convenient to use as a guide to
3230 >     * splitting than is the depth, since it is used while dividing by
3231 >     * two anyway.
3232 >     */
3233 >    final int batchFor(long b) {
3234 >        long n;
3235 >        if (b == Long.MAX_VALUE || (n = sumCount()) <= 1L || n < b)
3236 >            return 0;
3237 >        int sp = ForkJoinPool.getCommonPoolParallelism() << 2; // slack of 4
3238 >        return (b <= 0L || (n /= b) >= sp) ? sp : (int)n;
3239 >    }
3240  
3241      /**
3242       * Performs the given action for each (key, value).
3243       *
3244 +     * @param parallelismThreshold the (estimated) number of elements
3245 +     * needed for this operation to be executed in parallel.
3246       * @param action the action
3247       */
3248 <    public void forEachSequentially
3249 <        (BiConsumer<? super K, ? super V> action) {
3248 >    public void forEach(long parallelismThreshold,
3249 >                        BiConsumer<? super K,? super V> action) {
3250          if (action == null) throw new NullPointerException();
3251 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3252 <        V v;
3253 <        while ((v = it.advanceValue()) != null)
3434 <            action.accept(it.nextKey, v);
3251 >        new ForEachMappingTask<K,V>
3252 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3253 >             action).invoke();
3254      }
3255  
3256      /**
3257       * Performs the given action for each non-null transformation
3258       * of each (key, value).
3259       *
3260 +     * @param parallelismThreshold the (estimated) number of elements
3261 +     * needed for this operation to be executed in parallel.
3262       * @param transformer a function returning the transformation
3263       * for an element, or null if there is no transformation (in
3264       * which case the action is not applied)
3265       * @param action the action
3266       */
3267 <    public <U> void forEachSequentially
3268 <        (BiFunction<? super K, ? super V, ? extends U> transformer,
3269 <         Consumer<? super U> action) {
3267 >    public <U> void forEach(long parallelismThreshold,
3268 >                            BiFunction<? super K, ? super V, ? extends U> transformer,
3269 >                            Consumer<? super U> action) {
3270          if (transformer == null || action == null)
3271              throw new NullPointerException();
3272 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3273 <        V v; U u;
3274 <        while ((v = it.advanceValue()) != null) {
3454 <            if ((u = transformer.apply(it.nextKey, v)) != null)
3455 <                action.accept(u);
3456 <        }
3272 >        new ForEachTransformedMappingTask<K,V,U>
3273 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3274 >             transformer, action).invoke();
3275      }
3276  
3277      /**
3278       * Returns a non-null result from applying the given search
3279 <     * function on each (key, value), or null if none.
3279 >     * function on each (key, value), or null if none.  Upon
3280 >     * success, further element processing is suppressed and the
3281 >     * results of any other parallel invocations of the search
3282 >     * function are ignored.
3283       *
3284 +     * @param parallelismThreshold the (estimated) number of elements
3285 +     * needed for this operation to be executed in parallel.
3286       * @param searchFunction a function returning a non-null
3287       * result on success, else null
3288       * @return a non-null result from applying the given search
3289       * function on each (key, value), or null if none
3290       */
3291 <    public <U> U searchSequentially
3292 <        (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3291 >    public <U> U search(long parallelismThreshold,
3292 >                        BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3293          if (searchFunction == null) throw new NullPointerException();
3294 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3295 <        V v; U u;
3296 <        while ((v = it.advanceValue()) != null) {
3474 <            if ((u = searchFunction.apply(it.nextKey, v)) != null)
3475 <                return u;
3476 <        }
3477 <        return null;
3294 >        return new SearchMappingsTask<K,V,U>
3295 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3296 >             searchFunction, new AtomicReference<U>()).invoke();
3297      }
3298  
3299      /**
# Line 3482 | Line 3301 | public class ConcurrentHashMap<K,V>
3301       * of all (key, value) pairs using the given reducer to
3302       * combine values, or null if none.
3303       *
3304 +     * @param parallelismThreshold the (estimated) number of elements
3305 +     * needed for this operation to be executed in parallel.
3306       * @param transformer a function returning the transformation
3307       * for an element, or null if there is no transformation (in
3308       * which case it is not combined)
# Line 3489 | Line 3310 | public class ConcurrentHashMap<K,V>
3310       * @return the result of accumulating the given transformation
3311       * of all (key, value) pairs
3312       */
3313 <    public <U> U reduceSequentially
3314 <        (BiFunction<? super K, ? super V, ? extends U> transformer,
3315 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
3313 >    public <U> U reduce(long parallelismThreshold,
3314 >                        BiFunction<? super K, ? super V, ? extends U> transformer,
3315 >                        BiFunction<? super U, ? super U, ? extends U> reducer) {
3316          if (transformer == null || reducer == null)
3317              throw new NullPointerException();
3318 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3319 <        U r = null, u; V v;
3320 <        while ((v = it.advanceValue()) != null) {
3500 <            if ((u = transformer.apply(it.nextKey, v)) != null)
3501 <                r = (r == null) ? u : reducer.apply(r, u);
3502 <        }
3503 <        return r;
3318 >        return new MapReduceMappingsTask<K,V,U>
3319 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3320 >             null, transformer, reducer).invoke();
3321      }
3322  
3323      /**
# Line 3508 | Line 3325 | public class ConcurrentHashMap<K,V>
3325       * of all (key, value) pairs using the given reducer to
3326       * combine values, and the given basis as an identity value.
3327       *
3328 +     * @param parallelismThreshold the (estimated) number of elements
3329 +     * needed for this operation to be executed in parallel.
3330       * @param transformer a function returning the transformation
3331       * for an element
3332       * @param basis the identity (initial default value) for the reduction
# Line 3515 | Line 3334 | public class ConcurrentHashMap<K,V>
3334       * @return the result of accumulating the given transformation
3335       * of all (key, value) pairs
3336       */
3337 <    public double reduceToDoubleSequentially
3338 <        (ToDoubleBiFunction<? super K, ? super V> transformer,
3339 <         double basis,
3340 <         DoubleBinaryOperator reducer) {
3337 >    public double reduceToDoubleIn(long parallelismThreshold,
3338 >                                   ToDoubleBiFunction<? super K, ? super V> transformer,
3339 >                                   double basis,
3340 >                                   DoubleBinaryOperator reducer) {
3341          if (transformer == null || reducer == null)
3342              throw new NullPointerException();
3343 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3344 <        double r = basis; V v;
3345 <        while ((v = it.advanceValue()) != null)
3527 <            r = reducer.applyAsDouble(r, transformer.applyAsDouble(it.nextKey, v));
3528 <        return r;
3343 >        return new MapReduceMappingsToDoubleTask<K,V>
3344 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3345 >             null, transformer, basis, reducer).invoke();
3346      }
3347  
3348      /**
# Line 3533 | Line 3350 | public class ConcurrentHashMap<K,V>
3350       * of all (key, value) pairs using the given reducer to
3351       * combine values, and the given basis as an identity value.
3352       *
3353 +     * @param parallelismThreshold the (estimated) number of elements
3354 +     * needed for this operation to be executed in parallel.
3355       * @param transformer a function returning the transformation
3356       * for an element
3357       * @param basis the identity (initial default value) for the reduction
# Line 3540 | Line 3359 | public class ConcurrentHashMap<K,V>
3359       * @return the result of accumulating the given transformation
3360       * of all (key, value) pairs
3361       */
3362 <    public long reduceToLongSequentially
3363 <        (ToLongBiFunction<? super K, ? super V> transformer,
3364 <         long basis,
3365 <         LongBinaryOperator reducer) {
3362 >    public long reduceToLong(long parallelismThreshold,
3363 >                             ToLongBiFunction<? super K, ? super V> transformer,
3364 >                             long basis,
3365 >                             LongBinaryOperator reducer) {
3366          if (transformer == null || reducer == null)
3367              throw new NullPointerException();
3368 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3369 <        long r = basis; V v;
3370 <        while ((v = it.advanceValue()) != null)
3552 <            r = reducer.applyAsLong(r, transformer.applyAsLong(it.nextKey, v));
3553 <        return r;
3368 >        return new MapReduceMappingsToLongTask<K,V>
3369 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3370 >             null, transformer, basis, reducer).invoke();
3371      }
3372  
3373      /**
# Line 3558 | Line 3375 | public class ConcurrentHashMap<K,V>
3375       * of all (key, value) pairs using the given reducer to
3376       * combine values, and the given basis as an identity value.
3377       *
3378 +     * @param parallelismThreshold the (estimated) number of elements
3379 +     * needed for this operation to be executed in parallel.
3380       * @param transformer a function returning the transformation
3381       * for an element
3382       * @param basis the identity (initial default value) for the reduction
# Line 3565 | Line 3384 | public class ConcurrentHashMap<K,V>
3384       * @return the result of accumulating the given transformation
3385       * of all (key, value) pairs
3386       */
3387 <    public int reduceToIntSequentially
3388 <        (ToIntBiFunction<? super K, ? super V> transformer,
3389 <         int basis,
3390 <         IntBinaryOperator reducer) {
3387 >    public int reduceToInt(long parallelismThreshold,
3388 >                           ToIntBiFunction<? super K, ? super V> transformer,
3389 >                           int basis,
3390 >                           IntBinaryOperator reducer) {
3391          if (transformer == null || reducer == null)
3392              throw new NullPointerException();
3393 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3394 <        int r = basis; V v;
3395 <        while ((v = it.advanceValue()) != null)
3577 <            r = reducer.applyAsInt(r, transformer.applyAsInt(it.nextKey, v));
3578 <        return r;
3393 >        return new MapReduceMappingsToIntTask<K,V>
3394 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3395 >             null, transformer, basis, reducer).invoke();
3396      }
3397  
3398      /**
3399       * Performs the given action for each key.
3400       *
3401 +     * @param parallelismThreshold the (estimated) number of elements
3402 +     * needed for this operation to be executed in parallel.
3403       * @param action the action
3404       */
3405 <    public void forEachKeySequentially
3406 <        (Consumer<? super K> action) {
3407 <        new Traverser<K,V,Object>(this).forEachKey(action);
3405 >    public void forEachKey(long parallelismThreshold,
3406 >                           Consumer<? super K> action) {
3407 >        if (action == null) throw new NullPointerException();
3408 >        new ForEachKeyTask<K,V>
3409 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3410 >             action).invoke();
3411      }
3412  
3413      /**
3414       * Performs the given action for each non-null transformation
3415       * of each key.
3416       *
3417 +     * @param parallelismThreshold the (estimated) number of elements
3418 +     * needed for this operation to be executed in parallel.
3419       * @param transformer a function returning the transformation
3420       * for an element, or null if there is no transformation (in
3421       * which case the action is not applied)
3422       * @param action the action
3423       */
3424 <    public <U> void forEachKeySequentially
3425 <        (Function<? super K, ? extends U> transformer,
3426 <         Consumer<? super U> action) {
3424 >    public <U> void forEachKey(long parallelismThreshold,
3425 >                               Function<? super K, ? extends U> transformer,
3426 >                               Consumer<? super U> action) {
3427          if (transformer == null || action == null)
3428              throw new NullPointerException();
3429 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3430 <        K k; U u;
3431 <        while ((k = it.advanceKey()) != null) {
3608 <            if ((u = transformer.apply(k)) != null)
3609 <                action.accept(u);
3610 <        }
3429 >        new ForEachTransformedKeyTask<K,V,U>
3430 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3431 >             transformer, action).invoke();
3432      }
3433  
3434      /**
3435       * Returns a non-null result from applying the given search
3436 <     * function on each key, or null if none.
3436 >     * function on each key, or null if none. Upon success,
3437 >     * further element processing is suppressed and the results of
3438 >     * any other parallel invocations of the search function are
3439 >     * ignored.
3440       *
3441 +     * @param parallelismThreshold the (estimated) number of elements
3442 +     * needed for this operation to be executed in parallel.
3443       * @param searchFunction a function returning a non-null
3444       * result on success, else null
3445       * @return a non-null result from applying the given search
3446       * function on each key, or null if none
3447       */
3448 <    public <U> U searchKeysSequentially
3449 <        (Function<? super K, ? extends U> searchFunction) {
3450 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3451 <        K k; U u;
3452 <        while ((k = it.advanceKey()) != null) {
3453 <            if ((u = searchFunction.apply(k)) != null)
3628 <                return u;
3629 <        }
3630 <        return null;
3448 >    public <U> U searchKeys(long parallelismThreshold,
3449 >                            Function<? super K, ? extends U> searchFunction) {
3450 >        if (searchFunction == null) throw new NullPointerException();
3451 >        return new SearchKeysTask<K,V,U>
3452 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3453 >             searchFunction, new AtomicReference<U>()).invoke();
3454      }
3455  
3456      /**
3457       * Returns the result of accumulating all keys using the given
3458       * reducer to combine values, or null if none.
3459       *
3460 +     * @param parallelismThreshold the (estimated) number of elements
3461 +     * needed for this operation to be executed in parallel.
3462       * @param reducer a commutative associative combining function
3463       * @return the result of accumulating all keys using the given
3464       * reducer to combine values, or null if none
3465       */
3466 <    public K reduceKeysSequentially
3467 <        (BiFunction<? super K, ? super K, ? extends K> reducer) {
3466 >    public K reduceKeys(long parallelismThreshold,
3467 >                        BiFunction<? super K, ? super K, ? extends K> reducer) {
3468          if (reducer == null) throw new NullPointerException();
3469 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3470 <        K u, r = null;
3471 <        while ((u = it.advanceKey()) != null) {
3647 <            r = (r == null) ? u : reducer.apply(r, u);
3648 <        }
3649 <        return r;
3469 >        return new ReduceKeysTask<K,V>
3470 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3471 >             null, reducer).invoke();
3472      }
3473  
3474      /**
# Line 3654 | Line 3476 | public class ConcurrentHashMap<K,V>
3476       * of all keys using the given reducer to combine values, or
3477       * null if none.
3478       *
3479 +     * @param parallelismThreshold the (estimated) number of elements
3480 +     * needed for this operation to be executed in parallel.
3481       * @param transformer a function returning the transformation
3482       * for an element, or null if there is no transformation (in
3483       * which case it is not combined)
# Line 3661 | Line 3485 | public class ConcurrentHashMap<K,V>
3485       * @return the result of accumulating the given transformation
3486       * of all keys
3487       */
3488 <    public <U> U reduceKeysSequentially
3489 <        (Function<? super K, ? extends U> transformer,
3488 >    public <U> U reduceKeys(long parallelismThreshold,
3489 >                            Function<? super K, ? extends U> transformer,
3490           BiFunction<? super U, ? super U, ? extends U> reducer) {
3491          if (transformer == null || reducer == null)
3492              throw new NullPointerException();
3493 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3494 <        K k; U r = null, u;
3495 <        while ((k = it.advanceKey()) != null) {
3672 <            if ((u = transformer.apply(k)) != null)
3673 <                r = (r == null) ? u : reducer.apply(r, u);
3674 <        }
3675 <        return r;
3493 >        return new MapReduceKeysTask<K,V,U>
3494 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3495 >             null, transformer, reducer).invoke();
3496      }
3497  
3498      /**
# Line 3680 | Line 3500 | public class ConcurrentHashMap<K,V>
3500       * of all keys using the given reducer to combine values, and
3501       * the given basis as an identity value.
3502       *
3503 +     * @param parallelismThreshold the (estimated) number of elements
3504 +     * needed for this operation to be executed in parallel.
3505       * @param transformer a function returning the transformation
3506       * for an element
3507       * @param basis the identity (initial default value) for the reduction
# Line 3687 | Line 3509 | public class ConcurrentHashMap<K,V>
3509       * @return the result of accumulating the given transformation
3510       * of all keys
3511       */
3512 <    public double reduceKeysToDoubleSequentially
3513 <        (ToDoubleFunction<? super K> transformer,
3514 <         double basis,
3515 <         DoubleBinaryOperator reducer) {
3512 >    public double reduceKeysToDouble(long parallelismThreshold,
3513 >                                     ToDoubleFunction<? super K> transformer,
3514 >                                     double basis,
3515 >                                     DoubleBinaryOperator reducer) {
3516          if (transformer == null || reducer == null)
3517              throw new NullPointerException();
3518 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3519 <        double r = basis;
3520 <        K k;
3699 <        while ((k = it.advanceKey()) != null)
3700 <            r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
3701 <        return r;
3518 >        return new MapReduceKeysToDoubleTask<K,V>
3519 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3520 >             null, transformer, basis, reducer).invoke();
3521      }
3522  
3523      /**
# Line 3706 | Line 3525 | public class ConcurrentHashMap<K,V>
3525       * of all keys using the given reducer to combine values, and
3526       * the given basis as an identity value.
3527       *
3528 +     * @param parallelismThreshold the (estimated) number of elements
3529 +     * needed for this operation to be executed in parallel.
3530       * @param transformer a function returning the transformation
3531       * for an element
3532       * @param basis the identity (initial default value) for the reduction
# Line 3713 | Line 3534 | public class ConcurrentHashMap<K,V>
3534       * @return the result of accumulating the given transformation
3535       * of all keys
3536       */
3537 <    public long reduceKeysToLongSequentially
3538 <        (ToLongFunction<? super K> transformer,
3539 <         long basis,
3540 <         LongBinaryOperator reducer) {
3537 >    public long reduceKeysToLong(long parallelismThreshold,
3538 >                                 ToLongFunction<? super K> transformer,
3539 >                                 long basis,
3540 >                                 LongBinaryOperator reducer) {
3541          if (transformer == null || reducer == null)
3542              throw new NullPointerException();
3543 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3544 <        long r = basis;
3545 <        K k;
3725 <        while ((k = it.advanceKey()) != null)
3726 <            r = reducer.applyAsLong(r, transformer.applyAsLong(k));
3727 <        return r;
3543 >        return new MapReduceKeysToLongTask<K,V>
3544 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3545 >             null, transformer, basis, reducer).invoke();
3546      }
3547  
3548      /**
# Line 3732 | Line 3550 | public class ConcurrentHashMap<K,V>
3550       * of all keys using the given reducer to combine values, and
3551       * the given basis as an identity value.
3552       *
3553 +     * @param parallelismThreshold the (estimated) number of elements
3554 +     * needed for this operation to be executed in parallel.
3555       * @param transformer a function returning the transformation
3556       * for an element
3557       * @param basis the identity (initial default value) for the reduction
# Line 3739 | Line 3559 | public class ConcurrentHashMap<K,V>
3559       * @return the result of accumulating the given transformation
3560       * of all keys
3561       */
3562 <    public int reduceKeysToIntSequentially
3563 <        (ToIntFunction<? super K> transformer,
3564 <         int basis,
3565 <         IntBinaryOperator reducer) {
3562 >    public int reduceKeysToInt(long parallelismThreshold,
3563 >                               ToIntFunction<? super K> transformer,
3564 >                               int basis,
3565 >                               IntBinaryOperator reducer) {
3566          if (transformer == null || reducer == null)
3567              throw new NullPointerException();
3568 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3569 <        int r = basis;
3570 <        K k;
3751 <        while ((k = it.advanceKey()) != null)
3752 <            r = reducer.applyAsInt(r, transformer.applyAsInt(k));
3753 <        return r;
3568 >        return new MapReduceKeysToIntTask<K,V>
3569 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3570 >             null, transformer, basis, reducer).invoke();
3571      }
3572  
3573      /**
3574       * Performs the given action for each value.
3575       *
3576 +     * @param parallelismThreshold the (estimated) number of elements
3577 +     * needed for this operation to be executed in parallel.
3578       * @param action the action
3579       */
3580 <    public void forEachValueSequentially(Consumer<? super V> action) {
3581 <        new Traverser<K,V,Object>(this).forEachValue(action);
3580 >    public void forEachValue(long parallelismThreshold,
3581 >                             Consumer<? super V> action) {
3582 >        if (action == null)
3583 >            throw new NullPointerException();
3584 >        new ForEachValueTask<K,V>
3585 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3586 >             action).invoke();
3587      }
3588  
3589      /**
3590       * Performs the given action for each non-null transformation
3591       * of each value.
3592       *
3593 +     * @param parallelismThreshold the (estimated) number of elements
3594 +     * needed for this operation to be executed in parallel.
3595       * @param transformer a function returning the transformation
3596       * for an element, or null if there is no transformation (in
3597       * which case the action is not applied)
3598       * @param action the action
3599       */
3600 <    public <U> void forEachValueSequentially
3601 <        (Function<? super V, ? extends U> transformer,
3602 <         Consumer<? super U> action) {
3600 >    public <U> void forEachValue(long parallelismThreshold,
3601 >                                 Function<? super V, ? extends U> transformer,
3602 >                                 Consumer<? super U> action) {
3603          if (transformer == null || action == null)
3604              throw new NullPointerException();
3605 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3606 <        V v; U u;
3607 <        while ((v = it.advanceValue()) != null) {
3782 <            if ((u = transformer.apply(v)) != null)
3783 <                action.accept(u);
3784 <        }
3605 >        new ForEachTransformedValueTask<K,V,U>
3606 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3607 >             transformer, action).invoke();
3608      }
3609  
3610      /**
3611       * Returns a non-null result from applying the given search
3612 <     * function on each value, or null if none.
3612 >     * function on each value, or null if none.  Upon success,
3613 >     * further element processing is suppressed and the results of
3614 >     * any other parallel invocations of the search function are
3615 >     * ignored.
3616       *
3617 +     * @param parallelismThreshold the (estimated) number of elements
3618 +     * needed for this operation to be executed in parallel.
3619       * @param searchFunction a function returning a non-null
3620       * result on success, else null
3621       * @return a non-null result from applying the given search
3622       * function on each value, or null if none
3623       */
3624 <    public <U> U searchValuesSequentially
3625 <        (Function<? super V, ? extends U> searchFunction) {
3624 >    public <U> U searchValues(long parallelismThreshold,
3625 >                              Function<? super V, ? extends U> searchFunction) {
3626          if (searchFunction == null) throw new NullPointerException();
3627 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3628 <        V v; U u;
3629 <        while ((v = it.advanceValue()) != null) {
3802 <            if ((u = searchFunction.apply(v)) != null)
3803 <                return u;
3804 <        }
3805 <        return null;
3627 >        return new SearchValuesTask<K,V,U>
3628 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3629 >             searchFunction, new AtomicReference<U>()).invoke();
3630      }
3631  
3632      /**
3633       * Returns the result of accumulating all values using the
3634       * given reducer to combine values, or null if none.
3635       *
3636 +     * @param parallelismThreshold the (estimated) number of elements
3637 +     * needed for this operation to be executed in parallel.
3638       * @param reducer a commutative associative combining function
3639       * @return the result of accumulating all values
3640       */
3641 <    public V reduceValuesSequentially
3642 <        (BiFunction<? super V, ? super V, ? extends V> reducer) {
3641 >    public V reduceValues(long parallelismThreshold,
3642 >                          BiFunction<? super V, ? super V, ? extends V> reducer) {
3643          if (reducer == null) throw new NullPointerException();
3644 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3645 <        V r = null; V v;
3646 <        while ((v = it.advanceValue()) != null)
3821 <            r = (r == null) ? v : reducer.apply(r, v);
3822 <        return r;
3644 >        return new ReduceValuesTask<K,V>
3645 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3646 >             null, reducer).invoke();
3647      }
3648  
3649      /**
# Line 3827 | Line 3651 | public class ConcurrentHashMap<K,V>
3651       * of all values using the given reducer to combine values, or
3652       * null if none.
3653       *
3654 +     * @param parallelismThreshold the (estimated) number of elements
3655 +     * needed for this operation to be executed in parallel.
3656       * @param transformer a function returning the transformation
3657       * for an element, or null if there is no transformation (in
3658       * which case it is not combined)
# Line 3834 | Line 3660 | public class ConcurrentHashMap<K,V>
3660       * @return the result of accumulating the given transformation
3661       * of all values
3662       */
3663 <    public <U> U reduceValuesSequentially
3664 <        (Function<? super V, ? extends U> transformer,
3665 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
3663 >    public <U> U reduceValues(long parallelismThreshold,
3664 >                              Function<? super V, ? extends U> transformer,
3665 >                              BiFunction<? super U, ? super U, ? extends U> reducer) {
3666          if (transformer == null || reducer == null)
3667              throw new NullPointerException();
3668 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3669 <        U r = null, u; V v;
3670 <        while ((v = it.advanceValue()) != null) {
3845 <            if ((u = transformer.apply(v)) != null)
3846 <                r = (r == null) ? u : reducer.apply(r, u);
3847 <        }
3848 <        return r;
3668 >        return new MapReduceValuesTask<K,V,U>
3669 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3670 >             null, transformer, reducer).invoke();
3671      }
3672  
3673      /**
# Line 3853 | Line 3675 | public class ConcurrentHashMap<K,V>
3675       * of all values using the given reducer to combine values,
3676       * and the given basis as an identity value.
3677       *
3678 +     * @param parallelismThreshold the (estimated) number of elements
3679 +     * needed for this operation to be executed in parallel.
3680       * @param transformer a function returning the transformation
3681       * for an element
3682       * @param basis the identity (initial default value) for the reduction
# Line 3860 | Line 3684 | public class ConcurrentHashMap<K,V>
3684       * @return the result of accumulating the given transformation
3685       * of all values
3686       */
3687 <    public double reduceValuesToDoubleSequentially
3688 <        (ToDoubleFunction<? super V> transformer,
3689 <         double basis,
3690 <         DoubleBinaryOperator reducer) {
3687 >    public double reduceValuesToDouble(long parallelismThreshold,
3688 >                                       ToDoubleFunction<? super V> transformer,
3689 >                                       double basis,
3690 >                                       DoubleBinaryOperator reducer) {
3691          if (transformer == null || reducer == null)
3692              throw new NullPointerException();
3693 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3694 <        double r = basis; V v;
3695 <        while ((v = it.advanceValue()) != null)
3872 <            r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3873 <        return r;
3693 >        return new MapReduceValuesToDoubleTask<K,V>
3694 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3695 >             null, transformer, basis, reducer).invoke();
3696      }
3697  
3698      /**
# Line 3878 | Line 3700 | public class ConcurrentHashMap<K,V>
3700       * of all values using the given reducer to combine values,
3701       * and the given basis as an identity value.
3702       *
3703 +     * @param parallelismThreshold the (estimated) number of elements
3704 +     * needed for this operation to be executed in parallel.
3705       * @param transformer a function returning the transformation
3706       * for an element
3707       * @param basis the identity (initial default value) for the reduction
# Line 3885 | Line 3709 | public class ConcurrentHashMap<K,V>
3709       * @return the result of accumulating the given transformation
3710       * of all values
3711       */
3712 <    public long reduceValuesToLongSequentially
3713 <        (ToLongFunction<? super V> transformer,
3714 <         long basis,
3715 <         LongBinaryOperator reducer) {
3712 >    public long reduceValuesToLong(long parallelismThreshold,
3713 >                                   ToLongFunction<? super V> transformer,
3714 >                                   long basis,
3715 >                                   LongBinaryOperator reducer) {
3716          if (transformer == null || reducer == null)
3717              throw new NullPointerException();
3718 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3719 <        long r = basis; V v;
3720 <        while ((v = it.advanceValue()) != null)
3897 <            r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3898 <        return r;
3718 >        return new MapReduceValuesToLongTask<K,V>
3719 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3720 >             null, transformer, basis, reducer).invoke();
3721      }
3722  
3723      /**
# Line 3903 | Line 3725 | public class ConcurrentHashMap<K,V>
3725       * of all values using the given reducer to combine values,
3726       * and the given basis as an identity value.
3727       *
3728 +     * @param parallelismThreshold the (estimated) number of elements
3729 +     * needed for this operation to be executed in parallel.
3730       * @param transformer a function returning the transformation
3731       * for an element
3732       * @param basis the identity (initial default value) for the reduction
# Line 3910 | Line 3734 | public class ConcurrentHashMap<K,V>
3734       * @return the result of accumulating the given transformation
3735       * of all values
3736       */
3737 <    public int reduceValuesToIntSequentially
3738 <        (ToIntFunction<? super V> transformer,
3739 <         int basis,
3740 <         IntBinaryOperator reducer) {
3737 >    public int reduceValuesToInt(long parallelismThreshold,
3738 >                                 ToIntFunction<? super V> transformer,
3739 >                                 int basis,
3740 >                                 IntBinaryOperator reducer) {
3741          if (transformer == null || reducer == null)
3742              throw new NullPointerException();
3743 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3744 <        int r = basis; V v;
3745 <        while ((v = it.advanceValue()) != null)
3922 <            r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3923 <        return r;
3743 >        return new MapReduceValuesToIntTask<K,V>
3744 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3745 >             null, transformer, basis, reducer).invoke();
3746      }
3747  
3748      /**
3749       * Performs the given action for each entry.
3750       *
3751 +     * @param parallelismThreshold the (estimated) number of elements
3752 +     * needed for this operation to be executed in parallel.
3753       * @param action the action
3754       */
3755 <    public void forEachEntrySequentially
3756 <        (Consumer<? super Map.Entry<K,V>> action) {
3755 >    public void forEachEntry(long parallelismThreshold,
3756 >                             Consumer<? super Map.Entry<K,V>> action) {
3757          if (action == null) throw new NullPointerException();
3758 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3759 <        V v;
3936 <        while ((v = it.advanceValue()) != null)
3937 <            action.accept(entryFor(it.nextKey, v));
3758 >        new ForEachEntryTask<K,V>(null, batchFor(parallelismThreshold), 0, 0, table,
3759 >                                  action).invoke();
3760      }
3761  
3762      /**
3763       * Performs the given action for each non-null transformation
3764       * of each entry.
3765       *
3766 +     * @param parallelismThreshold the (estimated) number of elements
3767 +     * needed for this operation to be executed in parallel.
3768       * @param transformer a function returning the transformation
3769       * for an element, or null if there is no transformation (in
3770       * which case the action is not applied)
3771       * @param action the action
3772       */
3773 <    public <U> void forEachEntrySequentially
3774 <        (Function<Map.Entry<K,V>, ? extends U> transformer,
3775 <         Consumer<? super U> action) {
3773 >    public <U> void forEachEntry(long parallelismThreshold,
3774 >                                 Function<Map.Entry<K,V>, ? extends U> transformer,
3775 >                                 Consumer<? super U> action) {
3776          if (transformer == null || action == null)
3777              throw new NullPointerException();
3778 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3779 <        V v; U u;
3780 <        while ((v = it.advanceValue()) != null) {
3957 <            if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
3958 <                action.accept(u);
3959 <        }
3778 >        new ForEachTransformedEntryTask<K,V,U>
3779 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3780 >             transformer, action).invoke();
3781      }
3782  
3783      /**
3784       * Returns a non-null result from applying the given search
3785 <     * function on each entry, or null if none.
3785 >     * function on each entry, or null if none.  Upon success,
3786 >     * further element processing is suppressed and the results of
3787 >     * any other parallel invocations of the search function are
3788 >     * ignored.
3789       *
3790 +     * @param parallelismThreshold the (estimated) number of elements
3791 +     * needed for this operation to be executed in parallel.
3792       * @param searchFunction a function returning a non-null
3793       * result on success, else null
3794       * @return a non-null result from applying the given search
3795       * function on each entry, or null if none
3796       */
3797 <    public <U> U searchEntriesSequentially
3798 <        (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3797 >    public <U> U searchEntries(long parallelismThreshold,
3798 >                               Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3799          if (searchFunction == null) throw new NullPointerException();
3800 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3801 <        V v; U u;
3802 <        while ((v = it.advanceValue()) != null) {
3977 <            if ((u = searchFunction.apply(entryFor(it.nextKey, v))) != null)
3978 <                return u;
3979 <        }
3980 <        return null;
3800 >        return new SearchEntriesTask<K,V,U>
3801 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3802 >             searchFunction, new AtomicReference<U>()).invoke();
3803      }
3804  
3805      /**
3806       * Returns the result of accumulating all entries using the
3807       * given reducer to combine values, or null if none.
3808       *
3809 +     * @param parallelismThreshold the (estimated) number of elements
3810 +     * needed for this operation to be executed in parallel.
3811       * @param reducer a commutative associative combining function
3812       * @return the result of accumulating all entries
3813       */
3814 <    public Map.Entry<K,V> reduceEntriesSequentially
3815 <        (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3814 >    public Map.Entry<K,V> reduceEntries(long parallelismThreshold,
3815 >                                        BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3816          if (reducer == null) throw new NullPointerException();
3817 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3818 <        Map.Entry<K,V> r = null; V v;
3819 <        while ((v = it.advanceValue()) != null) {
3996 <            Map.Entry<K,V> u = entryFor(it.nextKey, v);
3997 <            r = (r == null) ? u : reducer.apply(r, u);
3998 <        }
3999 <        return r;
3817 >        return new ReduceEntriesTask<K,V>
3818 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3819 >             null, reducer).invoke();
3820      }
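
An illustrative usage sketch of the entry reduction without a transformer; the demo class, map contents, and threshold are invented. The reducer keeps whichever entry has the larger value, an associative and commutative choice of the kind the javadoc calls for.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ReduceEntriesDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
        map.put("a", 3);
        map.put("b", 9);
        map.put("c", 5);
        // Reduce to the entry with the maximum value; null if the map is empty.
        Map.Entry<String,Integer> max =
            map.reduceEntries(2L, (e1, e2) -> e1.getValue() >= e2.getValue() ? e1 : e2);
        System.out.println(max);   // b=9
    }
}
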
3821  
3822      /**
# Line 4004 | Line 3824 | public class ConcurrentHashMap<K,V>
3824       * of all entries using the given reducer to combine values,
3825       * or null if none.
3826       *
3827 +     * @param parallelismThreshold the (estimated) number of elements
3828 +     * needed for this operation to be executed in parallel.
3829       * @param transformer a function returning the transformation
3830       * for an element, or null if there is no transformation (in
3831       * which case it is not combined)
# Line 4011 | Line 3833 | public class ConcurrentHashMap<K,V>
3833       * @return the result of accumulating the given transformation
3834       * of all entries
3835       */
3836 <    public <U> U reduceEntriesSequentially
3837 <        (Function<Map.Entry<K,V>, ? extends U> transformer,
3838 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
3836 >    public <U> U reduceEntries(long parallelismThreshold,
3837 >                               Function<Map.Entry<K,V>, ? extends U> transformer,
3838 >                               BiFunction<? super U, ? super U, ? extends U> reducer) {
3839          if (transformer == null || reducer == null)
3840              throw new NullPointerException();
3841 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3842 <        U r = null, u; V v;
3843 <        while ((v = it.advanceValue()) != null) {
4022 <            if ((u = transformer.apply(entryFor(it.nextKey, v))) != null)
4023 <                r = (r == null) ? u : reducer.apply(r, u);
4024 <        }
4025 <        return r;
3841 >        return new MapReduceEntriesTask<K,V,U>
3842 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3843 >             null, transformer, reducer).invoke();
3844      }
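
An illustrative usage sketch of the transformer-plus-reducer form; the demo class, map contents, the transformation to an int, and the use of Integer::sum are invented for demonstration. Summation keeps the combining step commutative and associative, as required.

import java.util.concurrent.ConcurrentHashMap;

public class MapReduceEntriesDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
        map.put("ab", 1);
        map.put("cde", 2);
        // Transform each entry to key length + value (a null transformation
        // would exclude the entry), then sum the results; null if no entries.
        Integer total = map.reduceEntries(Long.MAX_VALUE,
                                          e -> e.getKey().length() + e.getValue(),
                                          Integer::sum);
        System.out.println(total);   // (2+1) + (3+2) = 8
    }
}
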
3845  
3846      /**
# Line 4030 | Line 3848 | public class ConcurrentHashMap<K,V>
3848       * of all entries using the given reducer to combine values,
3849       * and the given basis as an identity value.
3850       *
3851 +     * @param parallelismThreshold the (estimated) number of elements
3852 +     * needed for this operation to be executed in parallel.
3853       * @param transformer a function returning the transformation
3854       * for an element
3855       * @param basis the identity (initial default value) for the reduction
# Line 4037 | Line 3857 | public class ConcurrentHashMap<K,V>
3857       * @return the result of accumulating the given transformation
3858       * of all entries
3859       */
3860 <    public double reduceEntriesToDoubleSequentially
3861 <        (ToDoubleFunction<Map.Entry<K,V>> transformer,
3862 <         double basis,
3863 <         DoubleBinaryOperator reducer) {
3860 >    public double reduceEntriesToDouble(long parallelismThreshold,
3861 >                                        ToDoubleFunction<Map.Entry<K,V>> transformer,
3862 >                                        double basis,
3863 >                                        DoubleBinaryOperator reducer) {
3864          if (transformer == null || reducer == null)
3865              throw new NullPointerException();
3866 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3867 <        double r = basis; V v;
3868 <        while ((v = it.advanceValue()) != null)
4049 <            r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(it.nextKey, v)));
4050 <        return r;
3866 >        return new MapReduceEntriesToDoubleTask<K,V>
3867 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3868 >             null, transformer, basis, reducer).invoke();
3869      }
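
An illustrative usage sketch of the double-valued reduction; the demo class, price map, basis of 0.0, and Double::sum are invented for demonstration.

import java.util.concurrent.ConcurrentHashMap;

public class ReduceEntriesToDoubleDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Double> prices = new ConcurrentHashMap<>();
        prices.put("widget", 2.50);
        prices.put("gadget", 4.25);
        // Sum all values, starting from the identity value (basis) 0.0.
        double total = prices.reduceEntriesToDouble(Long.MAX_VALUE,
                                                    e -> e.getValue(),
                                                    0.0,
                                                    Double::sum);
        System.out.println(total);   // 6.75
    }
}
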
3870  
3871      /**
# Line 4055 | Line 3873 | public class ConcurrentHashMap<K,V>
3873       * of all entries using the given reducer to combine values,
3874       * and the given basis as an identity value.
3875       *
3876 +     * @param parallelismThreshold the (estimated) number of elements
3877 +     * needed for this operation to be executed in parallel.
3878       * @param transformer a function returning the transformation
3879       * for an element
3880       * @param basis the identity (initial default value) for the reduction
# Line 4062 | Line 3882 | public class ConcurrentHashMap<K,V>
3882       * @return the result of accumulating the given transformation
3883       * of all entries
3884       */
3885 <    public long reduceEntriesToLongSequentially
3886 <        (ToLongFunction<Map.Entry<K,V>> transformer,
3887 <         long basis,
3888 <         LongBinaryOperator reducer) {
3885 >    public long reduceEntriesToLong(long parallelismThreshold,
3886 >                                    ToLongFunction<Map.Entry<K,V>> transformer,
3887 >                                    long basis,
3888 >                                    LongBinaryOperator reducer) {
3889          if (transformer == null || reducer == null)
3890              throw new NullPointerException();
3891 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3892 <        long r = basis; V v;
3893 <        while ((v = it.advanceValue()) != null)
4074 <            r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(it.nextKey, v)));
4075 <        return r;
3891 >        return new MapReduceEntriesToLongTask<K,V>
3892 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3893 >             null, transformer, basis, reducer).invoke();
3894      }
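
An illustrative usage sketch of the long-valued form, here totaling counters; the demo class, map contents, and basis 0L are invented for demonstration.

import java.util.concurrent.ConcurrentHashMap;

public class ReduceEntriesToLongDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Long> counters = new ConcurrentHashMap<>();
        counters.put("hits", 10L);
        counters.put("misses", 3L);
        // Accumulate each entry's value into a long total, starting from the basis 0L.
        long total = counters.reduceEntriesToLong(Long.MAX_VALUE,
                                                  e -> e.getValue(),
                                                  0L,
                                                  Long::sum);
        System.out.println(total);   // 13
    }
}
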
3895  
3896      /**
# Line 4080 | Line 3898 | public class ConcurrentHashMap<K,V>
3898       * of all entries using the given reducer to combine values,
3899       * and the given basis as an identity value.
3900       *
3901 +     * @param parallelismThreshold the (estimated) number of elements
3902 +     * needed for this operation to be executed in parallel.
3903       * @param transformer a function returning the transformation
3904       * for an element
3905       * @param basis the identity (initial default value) for the reduction
# Line 4087 | Line 3907 | public class ConcurrentHashMap<K,V>
3907       * @return the result of accumulating the given transformation
3908       * of all entries
3909       */
3910 <    public int reduceEntriesToIntSequentially
3911 <        (ToIntFunction<Map.Entry<K,V>> transformer,
3912 <         int basis,
3913 <         IntBinaryOperator reducer) {
3910 >    public int reduceEntriesToInt(long parallelismThreshold,
3911 >                                  ToIntFunction<Map.Entry<K,V>> transformer,
3912 >                                  int basis,
3913 >                                  IntBinaryOperator reducer) {
3914          if (transformer == null || reducer == null)
3915              throw new NullPointerException();
3916 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3917 <        int r = basis; V v;
3918 <        while ((v = it.advanceValue()) != null)
4099 <            r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(it.nextKey, v)));
4100 <        return r;
4101 <    }
4102 <
4103 <    // Overrides of other default Map methods
4104 <
4105 <    public void forEach(BiConsumer<? super K, ? super V> action) {
4106 <        forEachSequentially(action);
4107 <    }
4108 <
4109 <    public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
4110 <        if (function == null) throw new NullPointerException();
4111 <        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
4112 <        V v;
4113 <        while ((v = it.advanceValue()) != null) {
4114 <            K k = it.nextKey;
4115 <            internalPut(k, function.apply(k, v), false);
4116 <        }
4117 <    }
4118 <
4119 <    // Parallel bulk operations
4120 <
4121 <    /**
4122 <     * Performs the given action for each (key, value).
4123 <     *
4124 <     * @param action the action
4125 <     */
4126 <    public void forEachInParallel(BiConsumer<? super K,? super V> action) {
4127 <        ForkJoinTasks.forEach
4128 <            (this, action).invoke();
4129 <    }
4130 <
4131 <    /**
4132 <     * Performs the given action for each non-null transformation
4133 <     * of each (key, value).
4134 <     *
4135 <     * @param transformer a function returning the transformation
4136 <     * for an element, or null if there is no transformation (in
4137 <     * which case the action is not applied)
4138 <     * @param action the action
4139 <     */
4140 <    public <U> void forEachInParallel
4141 <        (BiFunction<? super K, ? super V, ? extends U> transformer,
4142 <                            Consumer<? super U> action) {
4143 <        ForkJoinTasks.forEach
4144 <            (this, transformer, action).invoke();
4145 <    }
4146 <
4147 <    /**
4148 <     * Returns a non-null result from applying the given search
4149 <     * function on each (key, value), or null if none.  Upon
4150 <     * success, further element processing is suppressed and the
4151 <     * results of any other parallel invocations of the search
4152 <     * function are ignored.
4153 <     *
4154 <     * @param searchFunction a function returning a non-null
4155 <     * result on success, else null
4156 <     * @return a non-null result from applying the given search
4157 <     * function on each (key, value), or null if none
4158 <     */
4159 <    public <U> U searchInParallel
4160 <        (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4161 <        return ForkJoinTasks.search
4162 <            (this, searchFunction).invoke();
4163 <    }
4164 <
4165 <    /**
4166 <     * Returns the result of accumulating the given transformation
4167 <     * of all (key, value) pairs using the given reducer to
4168 <     * combine values, or null if none.
4169 <     *
4170 <     * @param transformer a function returning the transformation
4171 <     * for an element, or null if there is no transformation (in
4172 <     * which case it is not combined)
4173 <     * @param reducer a commutative associative combining function
4174 <     * @return the result of accumulating the given transformation
4175 <     * of all (key, value) pairs
4176 <     */
4177 <    public <U> U reduceInParallel
4178 <        (BiFunction<? super K, ? super V, ? extends U> transformer,
4179 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
4180 <        return ForkJoinTasks.reduce
4181 <            (this, transformer, reducer).invoke();
4182 <    }
4183 <
4184 <    /**
4185 <     * Returns the result of accumulating the given transformation
4186 <     * of all (key, value) pairs using the given reducer to
4187 <     * combine values, and the given basis as an identity value.
4188 <     *
4189 <     * @param transformer a function returning the transformation
4190 <     * for an element
4191 <     * @param basis the identity (initial default value) for the reduction
4192 <     * @param reducer a commutative associative combining function
4193 <     * @return the result of accumulating the given transformation
4194 <     * of all (key, value) pairs
4195 <     */
4196 <    public double reduceToDoubleInParallel
4197 <        (ToDoubleBiFunction<? super K, ? super V> transformer,
4198 <         double basis,
4199 <         DoubleBinaryOperator reducer) {
4200 <        return ForkJoinTasks.reduceToDouble
4201 <            (this, transformer, basis, reducer).invoke();
4202 <    }
4203 <
4204 <    /**
4205 <     * Returns the result of accumulating the given transformation
4206 <     * of all (key, value) pairs using the given reducer to
4207 <     * combine values, and the given basis as an identity value.
4208 <     *
4209 <     * @param transformer a function returning the transformation
4210 <     * for an element
4211 <     * @param basis the identity (initial default value) for the reduction
4212 <     * @param reducer a commutative associative combining function
4213 <     * @return the result of accumulating the given transformation
4214 <     * of all (key, value) pairs
4215 <     */
4216 <    public long reduceToLongInParallel
4217 <        (ToLongBiFunction<? super K, ? super V> transformer,
4218 <         long basis,
4219 <         LongBinaryOperator reducer) {
4220 <        return ForkJoinTasks.reduceToLong
4221 <            (this, transformer, basis, reducer).invoke();
4222 <    }
4223 <
4224 <    /**
4225 <     * Returns the result of accumulating the given transformation
4226 <     * of all (key, value) pairs using the given reducer to
4227 <     * combine values, and the given basis as an identity value.
4228 <     *
4229 <     * @param transformer a function returning the transformation
4230 <     * for an element
4231 <     * @param basis the identity (initial default value) for the reduction
4232 <     * @param reducer a commutative associative combining function
4233 <     * @return the result of accumulating the given transformation
4234 <     * of all (key, value) pairs
4235 <     */
4236 <    public int reduceToIntInParallel
4237 <        (ToIntBiFunction<? super K, ? super V> transformer,
4238 <         int basis,
4239 <         IntBinaryOperator reducer) {
4240 <        return ForkJoinTasks.reduceToInt
4241 <            (this, transformer, basis, reducer).invoke();
4242 <    }
4243 <
4244 <    /**
4245 <     * Performs the given action for each key.
4246 <     *
4247 <     * @param action the action
4248 <     */
4249 <    public void forEachKeyInParallel(Consumer<? super K> action) {
4250 <        ForkJoinTasks.forEachKey
4251 <            (this, action).invoke();
4252 <    }
4253 <
4254 <    /**
4255 <     * Performs the given action for each non-null transformation
4256 <     * of each key.
4257 <     *
4258 <     * @param transformer a function returning the transformation
4259 <     * for an element, or null if there is no transformation (in
4260 <     * which case the action is not applied)
4261 <     * @param action the action
4262 <     */
4263 <    public <U> void forEachKeyInParallel
4264 <        (Function<? super K, ? extends U> transformer,
4265 <         Consumer<? super U> action) {
4266 <        ForkJoinTasks.forEachKey
4267 <            (this, transformer, action).invoke();
4268 <    }
4269 <
4270 <    /**
4271 <     * Returns a non-null result from applying the given search
4272 <     * function on each key, or null if none. Upon success,
4273 <     * further element processing is suppressed and the results of
4274 <     * any other parallel invocations of the search function are
4275 <     * ignored.
4276 <     *
4277 <     * @param searchFunction a function returning a non-null
4278 <     * result on success, else null
4279 <     * @return a non-null result from applying the given search
4280 <     * function on each key, or null if none
4281 <     */
4282 <    public <U> U searchKeysInParallel
4283 <        (Function<? super K, ? extends U> searchFunction) {
4284 <        return ForkJoinTasks.searchKeys
4285 <            (this, searchFunction).invoke();
4286 <    }
4287 <
4288 <    /**
4289 <     * Returns the result of accumulating all keys using the given
4290 <     * reducer to combine values, or null if none.
4291 <     *
4292 <     * @param reducer a commutative associative combining function
4293 <     * @return the result of accumulating all keys using the given
4294 <     * reducer to combine values, or null if none
4295 <     */
4296 <    public K reduceKeysInParallel
4297 <        (BiFunction<? super K, ? super K, ? extends K> reducer) {
4298 <        return ForkJoinTasks.reduceKeys
4299 <            (this, reducer).invoke();
4300 <    }
4301 <
4302 <    /**
4303 <     * Returns the result of accumulating the given transformation
4304 <     * of all keys using the given reducer to combine values, or
4305 <     * null if none.
4306 <     *
4307 <     * @param transformer a function returning the transformation
4308 <     * for an element, or null if there is no transformation (in
4309 <     * which case it is not combined)
4310 <     * @param reducer a commutative associative combining function
4311 <     * @return the result of accumulating the given transformation
4312 <     * of all keys
4313 <     */
4314 <    public <U> U reduceKeysInParallel
4315 <        (Function<? super K, ? extends U> transformer,
4316 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
4317 <        return ForkJoinTasks.reduceKeys
4318 <            (this, transformer, reducer).invoke();
4319 <    }
4320 <
4321 <    /**
4322 <     * Returns the result of accumulating the given transformation
4323 <     * of all keys using the given reducer to combine values, and
4324 <     * the given basis as an identity value.
4325 <     *
4326 <     * @param transformer a function returning the transformation
4327 <     * for an element
4328 <     * @param basis the identity (initial default value) for the reduction
4329 <     * @param reducer a commutative associative combining function
4330 <     * @return the result of accumulating the given transformation
4331 <     * of all keys
4332 <     */
4333 <    public double reduceKeysToDoubleInParallel
4334 <        (ToDoubleFunction<? super K> transformer,
4335 <         double basis,
4336 <         DoubleBinaryOperator reducer) {
4337 <        return ForkJoinTasks.reduceKeysToDouble
4338 <            (this, transformer, basis, reducer).invoke();
4339 <    }
4340 <
4341 <    /**
4342 <     * Returns the result of accumulating the given transformation
4343 <     * of all keys using the given reducer to combine values, and
4344 <     * the given basis as an identity value.
4345 <     *
4346 <     * @param transformer a function returning the transformation
4347 <     * for an element
4348 <     * @param basis the identity (initial default value) for the reduction
4349 <     * @param reducer a commutative associative combining function
4350 <     * @return the result of accumulating the given transformation
4351 <     * of all keys
4352 <     */
4353 <    public long reduceKeysToLongInParallel
4354 <        (ToLongFunction<? super K> transformer,
4355 <         long basis,
4356 <         LongBinaryOperator reducer) {
4357 <        return ForkJoinTasks.reduceKeysToLong
4358 <            (this, transformer, basis, reducer).invoke();
4359 <    }
4360 <
4361 <    /**
4362 <     * Returns the result of accumulating the given transformation
4363 <     * of all keys using the given reducer to combine values, and
4364 <     * the given basis as an identity value.
4365 <     *
4366 <     * @param transformer a function returning the transformation
4367 <     * for an element
4368 <     * @param basis the identity (initial default value) for the reduction
4369 <     * @param reducer a commutative associative combining function
4370 <     * @return the result of accumulating the given transformation
4371 <     * of all keys
4372 <     */
4373 <    public int reduceKeysToIntInParallel
4374 <        (ToIntFunction<? super K> transformer,
4375 <         int basis,
4376 <         IntBinaryOperator reducer) {
4377 <        return ForkJoinTasks.reduceKeysToInt
4378 <            (this, transformer, basis, reducer).invoke();
4379 <    }
4380 <
4381 <    /**
4382 <     * Performs the given action for each value.
4383 <     *
4384 <     * @param action the action
4385 <     */
4386 <    public void forEachValueInParallel(Consumer<? super V> action) {
4387 <        ForkJoinTasks.forEachValue
4388 <            (this, action).invoke();
4389 <    }
4390 <
4391 <    /**
4392 <     * Performs the given action for each non-null transformation
4393 <     * of each value.
4394 <     *
4395 <     * @param transformer a function returning the transformation
4396 <     * for an element, or null if there is no transformation (in
4397 <     * which case the action is not applied)
4398 <     * @param action the action
4399 <     */
4400 <    public <U> void forEachValueInParallel
4401 <        (Function<? super V, ? extends U> transformer,
4402 <         Consumer<? super U> action) {
4403 <        ForkJoinTasks.forEachValue
4404 <            (this, transformer, action).invoke();
4405 <    }
4406 <
4407 <    /**
4408 <     * Returns a non-null result from applying the given search
4409 <     * function on each value, or null if none.  Upon success,
4410 <     * further element processing is suppressed and the results of
4411 <     * any other parallel invocations of the search function are
4412 <     * ignored.
4413 <     *
4414 <     * @param searchFunction a function returning a non-null
4415 <     * result on success, else null
4416 <     * @return a non-null result from applying the given search
4417 <     * function on each value, or null if none
4418 <     */
4419 <    public <U> U searchValuesInParallel
4420 <        (Function<? super V, ? extends U> searchFunction) {
4421 <        return ForkJoinTasks.searchValues
4422 <            (this, searchFunction).invoke();
4423 <    }
4424 <
4425 <    /**
4426 <     * Returns the result of accumulating all values using the
4427 <     * given reducer to combine values, or null if none.
4428 <     *
4429 <     * @param reducer a commutative associative combining function
4430 <     * @return the result of accumulating all values
4431 <     */
4432 <    public V reduceValuesInParallel
4433 <        (BiFunction<? super V, ? super V, ? extends V> reducer) {
4434 <        return ForkJoinTasks.reduceValues
4435 <            (this, reducer).invoke();
4436 <    }
4437 <
4438 <    /**
4439 <     * Returns the result of accumulating the given transformation
4440 <     * of all values using the given reducer to combine values, or
4441 <     * null if none.
4442 <     *
4443 <     * @param transformer a function returning the transformation
4444 <     * for an element, or null if there is no transformation (in
4445 <     * which case it is not combined)
4446 <     * @param reducer a commutative associative combining function
4447 <     * @return the result of accumulating the given transformation
4448 <     * of all values
4449 <     */
4450 <    public <U> U reduceValuesInParallel
4451 <        (Function<? super V, ? extends U> transformer,
4452 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
4453 <        return ForkJoinTasks.reduceValues
4454 <            (this, transformer, reducer).invoke();
4455 <    }
4456 <
4457 <    /**
4458 <     * Returns the result of accumulating the given transformation
4459 <     * of all values using the given reducer to combine values,
4460 <     * and the given basis as an identity value.
4461 <     *
4462 <     * @param transformer a function returning the transformation
4463 <     * for an element
4464 <     * @param basis the identity (initial default value) for the reduction
4465 <     * @param reducer a commutative associative combining function
4466 <     * @return the result of accumulating the given transformation
4467 <     * of all values
4468 <     */
4469 <    public double reduceValuesToDoubleInParallel
4470 <        (ToDoubleFunction<? super V> transformer,
4471 <         double basis,
4472 <         DoubleBinaryOperator reducer) {
4473 <        return ForkJoinTasks.reduceValuesToDouble
4474 <            (this, transformer, basis, reducer).invoke();
4475 <    }
4476 <
4477 <    /**
4478 <     * Returns the result of accumulating the given transformation
4479 <     * of all values using the given reducer to combine values,
4480 <     * and the given basis as an identity value.
4481 <     *
4482 <     * @param transformer a function returning the transformation
4483 <     * for an element
4484 <     * @param basis the identity (initial default value) for the reduction
4485 <     * @param reducer a commutative associative combining function
4486 <     * @return the result of accumulating the given transformation
4487 <     * of all values
4488 <     */
4489 <    public long reduceValuesToLongInParallel
4490 <        (ToLongFunction<? super V> transformer,
4491 <         long basis,
4492 <         LongBinaryOperator reducer) {
4493 <        return ForkJoinTasks.reduceValuesToLong
4494 <            (this, transformer, basis, reducer).invoke();
4495 <    }
4496 <
4497 <    /**
4498 <     * Returns the result of accumulating the given transformation
4499 <     * of all values using the given reducer to combine values,
4500 <     * and the given basis as an identity value.
4501 <     *
4502 <     * @param transformer a function returning the transformation
4503 <     * for an element
4504 <     * @param basis the identity (initial default value) for the reduction
4505 <     * @param reducer a commutative associative combining function
4506 <     * @return the result of accumulating the given transformation
4507 <     * of all values
4508 <     */
4509 <    public int reduceValuesToIntInParallel
4510 <        (ToIntFunction<? super V> transformer,
4511 <         int basis,
4512 <         IntBinaryOperator reducer) {
4513 <        return ForkJoinTasks.reduceValuesToInt
4514 <            (this, transformer, basis, reducer).invoke();
4515 <    }
4516 <
4517 <    /**
4518 <     * Performs the given action for each entry.
4519 <     *
4520 <     * @param action the action
4521 <     */
4522 <    public void forEachEntryInParallel(Consumer<? super Map.Entry<K,V>> action) {
4523 <        ForkJoinTasks.forEachEntry
4524 <            (this, action).invoke();
4525 <    }
4526 <
4527 <    /**
4528 <     * Performs the given action for each non-null transformation
4529 <     * of each entry.
4530 <     *
4531 <     * @param transformer a function returning the transformation
4532 <     * for an element, or null if there is no transformation (in
4533 <     * which case the action is not applied)
4534 <     * @param action the action
4535 <     */
4536 <    public <U> void forEachEntryInParallel
4537 <        (Function<Map.Entry<K,V>, ? extends U> transformer,
4538 <         Consumer<? super U> action) {
4539 <        ForkJoinTasks.forEachEntry
4540 <            (this, transformer, action).invoke();
4541 <    }
4542 <
4543 <    /**
4544 <     * Returns a non-null result from applying the given search
4545 <     * function on each entry, or null if none.  Upon success,
4546 <     * further element processing is suppressed and the results of
4547 <     * any other parallel invocations of the search function are
4548 <     * ignored.
4549 <     *
4550 <     * @param searchFunction a function returning a non-null
4551 <     * result on success, else null
4552 <     * @return a non-null result from applying the given search
4553 <     * function on each entry, or null if none
4554 <     */
4555 <    public <U> U searchEntriesInParallel
4556 <        (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4557 <        return ForkJoinTasks.searchEntries
4558 <            (this, searchFunction).invoke();
4559 <    }
4560 <
4561 <    /**
4562 <     * Returns the result of accumulating all entries using the
4563 <     * given reducer to combine values, or null if none.
4564 <     *
4565 <     * @param reducer a commutative associative combining function
4566 <     * @return the result of accumulating all entries
4567 <     */
4568 <    public Map.Entry<K,V> reduceEntriesInParallel
4569 <        (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4570 <        return ForkJoinTasks.reduceEntries
4571 <            (this, reducer).invoke();
4572 <    }
4573 <
4574 <    /**
4575 <     * Returns the result of accumulating the given transformation
4576 <     * of all entries using the given reducer to combine values,
4577 <     * or null if none.
4578 <     *
4579 <     * @param transformer a function returning the transformation
4580 <     * for an element, or null if there is no transformation (in
4581 <     * which case it is not combined)
4582 <     * @param reducer a commutative associative combining function
4583 <     * @return the result of accumulating the given transformation
4584 <     * of all entries
4585 <     */
4586 <    public <U> U reduceEntriesInParallel
4587 <        (Function<Map.Entry<K,V>, ? extends U> transformer,
4588 <         BiFunction<? super U, ? super U, ? extends U> reducer) {
4589 <        return ForkJoinTasks.reduceEntries
4590 <            (this, transformer, reducer).invoke();
4591 <    }
4592 <
4593 <    /**
4594 <     * Returns the result of accumulating the given transformation
4595 <     * of all entries using the given reducer to combine values,
4596 <     * and the given basis as an identity value.
4597 <     *
4598 <     * @param transformer a function returning the transformation
4599 <     * for an element
4600 <     * @param basis the identity (initial default value) for the reduction
4601 <     * @param reducer a commutative associative combining function
4602 <     * @return the result of accumulating the given transformation
4603 <     * of all entries
4604 <     */
4605 <    public double reduceEntriesToDoubleInParallel
4606 <        (ToDoubleFunction<Map.Entry<K,V>> transformer,
4607 <         double basis,
4608 <         DoubleBinaryOperator reducer) {
4609 <        return ForkJoinTasks.reduceEntriesToDouble
4610 <            (this, transformer, basis, reducer).invoke();
4611 <    }
4612 <
4613 <    /**
4614 <     * Returns the result of accumulating the given transformation
4615 <     * of all entries using the given reducer to combine values,
4616 <     * and the given basis as an identity value.
4617 <     *
4618 <     * @param transformer a function returning the transformation
4619 <     * for an element
4620 <     * @param basis the identity (initial default value) for the reduction
4621 <     * @param reducer a commutative associative combining function
4622 <     * @return the result of accumulating the given transformation
4623 <     * of all entries
4624 <     */
4625 <    public long reduceEntriesToLongInParallel
4626 <        (ToLongFunction<Map.Entry<K,V>> transformer,
4627 <         long basis,
4628 <         LongBinaryOperator reducer) {
4629 <        return ForkJoinTasks.reduceEntriesToLong
4630 <            (this, transformer, basis, reducer).invoke();
4631 <    }
4632 <
4633 <    /**
4634 <     * Returns the result of accumulating the given transformation
4635 <     * of all entries using the given reducer to combine values,
4636 <     * and the given basis as an identity value.
4637 <     *
4638 <     * @param transformer a function returning the transformation
4639 <     * for an element
4640 <     * @param basis the identity (initial default value) for the reduction
4641 <     * @param reducer a commutative associative combining function
4642 <     * @return the result of accumulating the given transformation
4643 <     * of all entries
4644 <     */
4645 <    public int reduceEntriesToIntInParallel
4646 <        (ToIntFunction<Map.Entry<K,V>> transformer,
4647 <         int basis,
4648 <         IntBinaryOperator reducer) {
4649 <        return ForkJoinTasks.reduceEntriesToInt
4650 <            (this, transformer, basis, reducer).invoke();
3916 >        return new MapReduceEntriesToIntTask<K,V>
3917 >            (null, batchFor(parallelismThreshold), 0, 0, table,
3918 >             null, transformer, basis, reducer).invoke();
3919      }
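
An illustrative usage sketch of reduceEntriesToInt, whose new parallel body ends just above; the demo class, the character-counting transformation, and the basis 0 are invented for demonstration.

import java.util.concurrent.ConcurrentHashMap;

public class ReduceEntriesToIntDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String,String> labels = new ConcurrentHashMap<>();
        labels.put("k1", "alpha");
        labels.put("k2", "beta");
        // Map each entry to the length of its value, then sum, starting from the basis 0.
        int chars = labels.reduceEntriesToInt(Long.MAX_VALUE,
                                              e -> e.getValue().length(),
                                              0,
                                              Integer::sum);
        System.out.println(chars);   // 5 + 4 = 9
    }
}
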
3920  
3921  
# Line 4656 | Line 3924 | public class ConcurrentHashMap<K,V>
3924      /**
3925       * Base class for views.
3926       */
3927 <    abstract static class CHMCollectionView<K,V,E>
3928 <            implements Collection<E>, java.io.Serializable {
3927 >    abstract static class CollectionView<K,V,E>
3928 >        implements Collection<E>, java.io.Serializable {
3929          private static final long serialVersionUID = 7249069246763182397L;
3930          final ConcurrentHashMap<K,V> map;
3931 <        CHMCollectionView(ConcurrentHashMap<K,V> map)  { this.map = map; }
3931 >        CollectionView(ConcurrentHashMap<K,V> map)  { this.map = map; }
3932  
3933          /**
3934           * Returns the map backing this view.
# Line 4715 | Line 3983 | public class ConcurrentHashMap<K,V>
3983              return (i == n) ? r : Arrays.copyOf(r, i);
3984          }
3985  
4718        @SuppressWarnings("unchecked")
3986          public final <T> T[] toArray(T[] a) {
3987              long sz = map.mappingCount();
3988              if (sz > MAX_ARRAY_SIZE)
# Line 4806 | Line 4073 | public class ConcurrentHashMap<K,V>
4073  
4074      }
4075  
4809    abstract static class CHMSetView<K,V,E>
4810            extends CHMCollectionView<K,V,E>
4811            implements Set<E>, java.io.Serializable {
4812        private static final long serialVersionUID = 7249069246763182397L;
4813        CHMSetView(ConcurrentHashMap<K,V> map) { super(map); }
4814
4815        // Implement Set API
4816
4817        /**
4818         * Implements {@link Set#hashCode()}.
4819         * @return the hash code value for this set
4820         */
4821        public final int hashCode() {
4822            int h = 0;
4823            for (E e : this)
4824                h += e.hashCode();
4825            return h;
4826        }
4827
4828        /**
4829         * Implements {@link Set#equals(Object)}.
4830         * @param o object to be compared for equality with this set
4831         * @return {@code true} if the specified object is equal to this set
4832        */
4833        public final boolean equals(Object o) {
4834            Set<?> c;
4835            return ((o instanceof Set) &&
4836                    ((c = (Set<?>)o) == this ||
4837                     (containsAll(c) && c.containsAll(this))));
4838        }
4839    }
4840
4076      /**
4077       * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4078       * which additions may optionally be enabled by mapping to a
# Line 4847 | Line 4082 | public class ConcurrentHashMap<K,V>
4082       * {@link #newKeySet() newKeySet()},
4083       * {@link #newKeySet(int) newKeySet(int)}.
4084       */
4085 <    public static class KeySetView<K,V>
4086 <            extends CHMSetView<K,V,K>
4852 <            implements Set<K>, java.io.Serializable {
4085 >    public static class KeySetView<K,V> extends CollectionView<K,V,K>
4086 >        implements Set<K>, java.io.Serializable {
4087          private static final long serialVersionUID = 7249069246763182397L;
4088          private final V value;
4089          KeySetView(ConcurrentHashMap<K,V> map, V value) {  // non-public
# Line 4886 | Line 4120 | public class ConcurrentHashMap<K,V>
4120          /**
4121           * @return an iterator over the keys of the backing map
4122           */
4123 <        public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4123 >        public Iterator<K> iterator() {
4124 >            Node<K,V>[] t;
4125 >            ConcurrentHashMap<K,V> m = map;
4126 >            int f = (t = m.table) == null ? 0 : t.length;
4127 >            return new KeyIterator<K,V>(t, f, 0, f, m);
4128 >        }
4129  
4130          /**
4131           * Adds the specified key to this set view by mapping the key to
# Line 4928 | Line 4167 | public class ConcurrentHashMap<K,V>
4167              return added;
4168          }
4169  
4170 +        public int hashCode() {
4171 +            int h = 0;
4172 +            for (K e : this)
4173 +                h += e.hashCode();
4174 +            return h;
4175 +        }
4176 +
4177 +        public boolean equals(Object o) {
4178 +            Set<?> c;
4179 +            return ((o instanceof Set) &&
4180 +                    ((c = (Set<?>)o) == this ||
4181 +                     (containsAll(c) && c.containsAll(this))));
4182 +        }
4183 +
4184          public Spliterator<K> spliterator() {
4185 <            return new KeyIterator<>(map, null);
4185 >            Node<K,V>[] t;
4186 >            ConcurrentHashMap<K,V> m = map;
4187 >            long n = m.sumCount();
4188 >            int f = (t = m.table) == null ? 0 : t.length;
4189 >            return new KeySpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n);
4190          }
4191  
4192 +        public void forEach(Consumer<? super K> action) {
4193 +            if (action == null) throw new NullPointerException();
4194 +            Node<K,V>[] t;
4195 +            if ((t = map.table) != null) {
4196 +                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
4197 +                for (Node<K,V> p; (p = it.advance()) != null; )
4198 +                    action.accept((K)p.key);
4199 +            }
4200 +        }
4201      }
4202  
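An illustrative usage sketch of KeySetView as described above, both as a standalone concurrent set obtained from newKeySet() and as a view returned by keySet(mappedValue) in which add() is enabled; the demo class and element values are invented, and the Boolean.TRUE placeholder noted below reflects the current implementation of newKeySet().

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class KeySetViewDemo {
    public static void main(String[] args) {
        // A concurrent Set<String> backed by a ConcurrentHashMap; add() maps
        // each element to an internal placeholder value (Boolean.TRUE).
        Set<String> seen = ConcurrentHashMap.newKeySet();
        seen.add("alpha");
        System.out.println(seen.contains("alpha"));   // true

        // A key-set view of an existing map in which additions are enabled:
        // add("beta") inserts the mapping ("beta", 1) into the backing map.
        ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
        ConcurrentHashMap.KeySetView<String,Integer> keys = map.keySet(1);
        keys.add("beta");
        System.out.println(map.get("beta"));          // 1
    }
}
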
4203      /**
4204       * A view of a ConcurrentHashMap as a {@link Collection} of
4205       * values, in which additions are disabled. This class cannot be
4206       * directly instantiated. See {@link #values()}.
4941     *
4942     * <p>The view's {@code iterator} is a "weakly consistent" iterator
4943     * that will never throw {@link ConcurrentModificationException},
4944     * and guarantees to traverse elements as they existed upon
4945     * construction of the iterator, and may (but is not guaranteed to)
4946     * reflect any modifications subsequent to construction.
4207       */
4208 <    public static final class ValuesView<K,V>
4209 <            extends CHMCollectionView<K,V,V>
4950 <            implements Collection<V>, java.io.Serializable {
4208 >    static final class ValuesView<K,V> extends CollectionView<K,V,V>
4209 >        implements Collection<V>, java.io.Serializable {
4210          private static final long serialVersionUID = 2249069246763182397L;
4211          ValuesView(ConcurrentHashMap<K,V> map) { super(map); }
4212          public final boolean contains(Object o) {
4213              return map.containsValue(o);
4214          }
4215 +
4216          public final boolean remove(Object o) {
4217              if (o != null) {
4218                  for (Iterator<V> it = iterator(); it.hasNext();) {
# Line 4965 | Line 4225 | public class ConcurrentHashMap<K,V>
4225              return false;
4226          }
4227  
4968        /**
4969         * @return an iterator over the values of the backing map
4970         */
4228          public final Iterator<V> iterator() {
4229 <            return new ValueIterator<K,V>(map);
4229 >            ConcurrentHashMap<K,V> m = map;
4230 >            Node<K,V>[] t;
4231 >            int f = (t = m.table) == null ? 0 : t.length;
4232 >            return new ValueIterator<K,V>(t, f, 0, f, m);
4233          }
4234  
4975        /** Always throws {@link UnsupportedOperationException}. */
4235          public final boolean add(V e) {
4236              throw new UnsupportedOperationException();
4237          }
4979        /** Always throws {@link UnsupportedOperationException}. */
4238          public final boolean addAll(Collection<? extends V> c) {
4239              throw new UnsupportedOperationException();
4240          }
4241  
4242          public Spliterator<V> spliterator() {
4243 <            return new ValueIterator<K,V>(map, null);
4243 >            Node<K,V>[] t;
4244 >            ConcurrentHashMap<K,V> m = map;
4245 >            long n = m.sumCount();
4246 >            int f = (t = m.table) == null ? 0 : t.length;
4247 >            return new ValueSpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n);
4248          }
4249  
4250 +        public void forEach(Consumer<? super V> action) {
4251 +            if (action == null) throw new NullPointerException();
4252 +            Node<K,V>[] t;
4253 +            if ((t = map.table) != null) {
4254 +                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
4255 +                for (Node<K,V> p; (p = it.advance()) != null; )
4256 +                    action.accept(p.val);
4257 +            }
4258 +        }
4259      }
4260  
4261      /**
# Line 4992 | Line 4263 | public class ConcurrentHashMap<K,V>
4263       * entries.  This class cannot be directly instantiated. See
4264       * {@link #entrySet()}.
4265       */
4266 <    public static final class EntrySetView<K,V>
4267 <            extends CHMSetView<K,V,Map.Entry<K,V>>
4997 <            implements Set<Map.Entry<K,V>>, java.io.Serializable {
4266 >    static final class EntrySetView<K,V> extends CollectionView<K,V,Map.Entry<K,V>>
4267 >        implements Set<Map.Entry<K,V>>, java.io.Serializable {
4268          private static final long serialVersionUID = 2249069246763182397L;
4269          EntrySetView(ConcurrentHashMap<K,V> map) { super(map); }
4270  
4271 <        public final boolean contains(Object o) {
4271 >        public boolean contains(Object o) {
4272              Object k, v, r; Map.Entry<?,?> e;
4273              return ((o instanceof Map.Entry) &&
4274                      (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
# Line 5006 | Line 4276 | public class ConcurrentHashMap<K,V>
4276                      (v = e.getValue()) != null &&
4277                      (v == r || v.equals(r)));
4278          }
4279 <        public final boolean remove(Object o) {
4279 >
4280 >        public boolean remove(Object o) {
4281              Object k, v; Map.Entry<?,?> e;
4282              return ((o instanceof Map.Entry) &&
4283                      (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
# Line 5017 | Line 4288 | public class ConcurrentHashMap<K,V>
4288          /**
4289           * @return an iterator over the entries of the backing map
4290           */
4291 <        public final Iterator<Map.Entry<K,V>> iterator() {
4292 <            return new EntryIterator<K,V>(map);
4291 >        public Iterator<Map.Entry<K,V>> iterator() {
4292 >            ConcurrentHashMap<K,V> m = map;
4293 >            Node<K,V>[] t;
4294 >            int f = (t = m.table) == null ? 0 : t.length;
4295 >            return new EntryIterator<K,V>(t, f, 0, f, m);
4296          }
4297  
4298 <        /**
5025 <         * Adds the specified mapping to this view.
5026 <         *
5027 <         * @param e mapping to be added
5028 <         * @return {@code true} if this set changed as a result of the call
5029 <         * @throws NullPointerException if the entry, its key, or its
5030 <         * value is null
5031 <         */
5032 <        public final boolean add(Entry<K,V> e) {
4298 >        public boolean add(Entry<K,V> e) {
4299              return map.internalPut(e.getKey(), e.getValue(), false) == null;
4300          }
4301  
4302 <        /**
5037 <         * Adds all of the mappings in the specified collection to this
5038 <         * set, as if by calling {@link #add(Map.Entry)} on each one.
5039 <         * @param c the mappings to be inserted into this set
5040 <         * @return {@code true} if this set changed as a result of the call
5041 <         * @throws NullPointerException if the collection or any of its
5042 <         * entries, keys, or values are null
5043 <         */
5044 <        public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4302 >        public boolean addAll(Collection<? extends Entry<K,V>> c) {
4303              boolean added = false;
4304              for (Entry<K,V> e : c) {
4305                  if (add(e))
# Line 5050 | Line 4308 | public class ConcurrentHashMap<K,V>
4308              return added;
4309          }
4310  
4311 <        public Spliterator<Map.Entry<K,V>> spliterator() {
4312 <            return new EntryIterator<K,V>(map, null);
4313 <        }
4314 <
4315 <    }
4316 <
4317 <    // ---------------------------------------------------------------------
4318 <
4319 <    /**
4320 <     * Predefined tasks for performing bulk parallel operations on
5063 <     * ConcurrentHashMaps. These tasks follow the forms and rules used
5064 <     * for bulk operations. Each method has the same name, but returns
5065 <     * a task rather than invoking it. These methods may be useful in
5066 <     * custom applications such as submitting a task without waiting
5067 <     * for completion, using a custom pool, or combining with other
5068 <     * tasks.
5069 <     */
5070 <    public static class ForkJoinTasks {
5071 <        private ForkJoinTasks() {}
5072 <
5073 <        /**
5074 <         * Returns a task that when invoked, performs the given
5075 <         * action for each (key, value)
5076 <         *
5077 <         * @param map the map
5078 <         * @param action the action
5079 <         * @return the task
5080 <         */
5081 <        public static <K,V> ForkJoinTask<Void> forEach
5082 <            (ConcurrentHashMap<K,V> map,
5083 <             BiConsumer<? super K, ? super V> action) {
5084 <            if (action == null) throw new NullPointerException();
5085 <            return new ForEachMappingTask<K,V>(map, null, -1, action);
5086 <        }
5087 <
5088 <        /**
5089 <         * Returns a task that when invoked, performs the given
5090 <         * action for each non-null transformation of each (key, value)
5091 <         *
5092 <         * @param map the map
5093 <         * @param transformer a function returning the transformation
5094 <         * for an element, or null if there is no transformation (in
5095 <         * which case the action is not applied)
5096 <         * @param action the action
5097 <         * @return the task
5098 <         */
5099 <        public static <K,V,U> ForkJoinTask<Void> forEach
5100 <            (ConcurrentHashMap<K,V> map,
5101 <             BiFunction<? super K, ? super V, ? extends U> transformer,
5102 <             Consumer<? super U> action) {
5103 <            if (transformer == null || action == null)
5104 <                throw new NullPointerException();
5105 <            return new ForEachTransformedMappingTask<K,V,U>
5106 <                (map, null, -1, transformer, action);
5107 <        }
5108 <
5109 <        /**
5110 <         * Returns a task that when invoked, returns a non-null result
5111 <         * from applying the given search function on each (key,
5112 <         * value), or null if none. Upon success, further element
5113 <         * processing is suppressed and the results of any other
5114 <         * parallel invocations of the search function are ignored.
5115 <         *
5116 <         * @param map the map
5117 <         * @param searchFunction a function returning a non-null
5118 <         * result on success, else null
5119 <         * @return the task
5120 <         */
5121 <        public static <K,V,U> ForkJoinTask<U> search
5122 <            (ConcurrentHashMap<K,V> map,
5123 <             BiFunction<? super K, ? super V, ? extends U> searchFunction) {
5124 <            if (searchFunction == null) throw new NullPointerException();
5125 <            return new SearchMappingsTask<K,V,U>
5126 <                (map, null, -1, searchFunction,
5127 <                 new AtomicReference<U>());
5128 <        }
5129 <
5130 <        /**
5131 <         * Returns a task that when invoked, returns the result of
5132 <         * accumulating the given transformation of all (key, value) pairs
5133 <         * using the given reducer to combine values, or null if none.
5134 <         *
5135 <         * @param map the map
5136 <         * @param transformer a function returning the transformation
5137 <         * for an element, or null if there is no transformation (in
5138 <         * which case it is not combined)
5139 <         * @param reducer a commutative associative combining function
5140 <         * @return the task
5141 <         */
5142 <        public static <K,V,U> ForkJoinTask<U> reduce
5143 <            (ConcurrentHashMap<K,V> map,
5144 <             BiFunction<? super K, ? super V, ? extends U> transformer,
5145 <             BiFunction<? super U, ? super U, ? extends U> reducer) {
5146 <            if (transformer == null || reducer == null)
5147 <                throw new NullPointerException();
5148 <            return new MapReduceMappingsTask<K,V,U>
5149 <                (map, null, -1, null, transformer, reducer);
5150 <        }
5151 <
5152 <        /**
5153 <         * Returns a task that when invoked, returns the result of
5154 <         * accumulating the given transformation of all (key, value) pairs
5155 <         * using the given reducer to combine values, and the given
5156 <         * basis as an identity value.
5157 <         *
5158 <         * @param map the map
5159 <         * @param transformer a function returning the transformation
5160 <         * for an element
5161 <         * @param basis the identity (initial default value) for the reduction
5162 <         * @param reducer a commutative associative combining function
5163 <         * @return the task
5164 <         */
5165 <        public static <K,V> ForkJoinTask<Double> reduceToDouble
5166 <            (ConcurrentHashMap<K,V> map,
5167 <             ToDoubleBiFunction<? super K, ? super V> transformer,
5168 <             double basis,
5169 <             DoubleBinaryOperator reducer) {
5170 <            if (transformer == null || reducer == null)
5171 <                throw new NullPointerException();
5172 <            return new MapReduceMappingsToDoubleTask<K,V>
5173 <                (map, null, -1, null, transformer, basis, reducer);
5174 <        }
5175 <
5176 <        /**
5177 <         * Returns a task that when invoked, returns the result of
5178 <         * accumulating the given transformation of all (key, value) pairs
5179 <         * using the given reducer to combine values, and the given
5180 <         * basis as an identity value.
5181 <         *
5182 <         * @param map the map
5183 <         * @param transformer a function returning the transformation
5184 <         * for an element
5185 <         * @param basis the identity (initial default value) for the reduction
5186 <         * @param reducer a commutative associative combining function
5187 <         * @return the task
5188 <         */
5189 <        public static <K,V> ForkJoinTask<Long> reduceToLong
5190 <            (ConcurrentHashMap<K,V> map,
5191 <             ToLongBiFunction<? super K, ? super V> transformer,
5192 <             long basis,
5193 <             LongBinaryOperator reducer) {
5194 <            if (transformer == null || reducer == null)
5195 <                throw new NullPointerException();
5196 <            return new MapReduceMappingsToLongTask<K,V>
5197 <                (map, null, -1, null, transformer, basis, reducer);
5198 <        }
5199 <
5200 <        /**
5201 <         * Returns a task that when invoked, returns the result of
5202 <         * accumulating the given transformation of all (key, value) pairs
5203 <         * using the given reducer to combine values, and the given
5204 <         * basis as an identity value.
5205 <         *
5206 <         * @param map the map
5207 <         * @param transformer a function returning the transformation
5208 <         * for an element
5209 <         * @param basis the identity (initial default value) for the reduction
5210 <         * @param reducer a commutative associative combining function
5211 <         * @return the task
5212 <         */
5213 <        public static <K,V> ForkJoinTask<Integer> reduceToInt
5214 <            (ConcurrentHashMap<K,V> map,
5215 <             ToIntBiFunction<? super K, ? super V> transformer,
5216 <             int basis,
5217 <             IntBinaryOperator reducer) {
5218 <            if (transformer == null || reducer == null)
5219 <                throw new NullPointerException();
5220 <            return new MapReduceMappingsToIntTask<K,V>
5221 <                (map, null, -1, null, transformer, basis, reducer);
5222 <        }
5223 <
5224 <        /**
5225 <         * Returns a task that when invoked, performs the given action
5226 <         * for each key.
5227 <         *
5228 <         * @param map the map
5229 <         * @param action the action
5230 <         * @return the task
5231 <         */
5232 <        public static <K,V> ForkJoinTask<Void> forEachKey
5233 <            (ConcurrentHashMap<K,V> map,
5234 <             Consumer<? super K> action) {
5235 <            if (action == null) throw new NullPointerException();
5236 <            return new ForEachKeyTask<K,V>(map, null, -1, action);
5237 <        }
5238 <
5239 <        /**
5240 <         * Returns a task that when invoked, performs the given action
5241 <         * for each non-null transformation of each key.
5242 <         *
5243 <         * @param map the map
5244 <         * @param transformer a function returning the transformation
5245 <         * for an element, or null if there is no transformation (in
5246 <         * which case the action is not applied)
5247 <         * @param action the action
5248 <         * @return the task
5249 <         */
5250 <        public static <K,V,U> ForkJoinTask<Void> forEachKey
5251 <            (ConcurrentHashMap<K,V> map,
5252 <             Function<? super K, ? extends U> transformer,
5253 <             Consumer<? super U> action) {
5254 <            if (transformer == null || action == null)
5255 <                throw new NullPointerException();
5256 <            return new ForEachTransformedKeyTask<K,V,U>
5257 <                (map, null, -1, transformer, action);
5258 <        }
5259 <
5260 <        /**
5261 <         * Returns a task that when invoked, returns a non-null result
5262 <         * from applying the given search function on each key, or
5263 <         * null if none.  Upon success, further element processing is
5264 <         * suppressed and the results of any other parallel
5265 <         * invocations of the search function are ignored.
5266 <         *
5267 <         * @param map the map
5268 <         * @param searchFunction a function returning a non-null
5269 <         * result on success, else null
5270 <         * @return the task
5271 <         */
5272 <        public static <K,V,U> ForkJoinTask<U> searchKeys
5273 <            (ConcurrentHashMap<K,V> map,
5274 <             Function<? super K, ? extends U> searchFunction) {
5275 <            if (searchFunction == null) throw new NullPointerException();
5276 <            return new SearchKeysTask<K,V,U>
5277 <                (map, null, -1, searchFunction,
5278 <                 new AtomicReference<U>());
5279 <        }
5280 <
5281 <        /**
5282 <         * Returns a task that when invoked, returns the result of
5283 <         * accumulating all keys using the given reducer to combine
5284 <         * values, or null if none.
5285 <         *
5286 <         * @param map the map
5287 <         * @param reducer a commutative associative combining function
5288 <         * @return the task
5289 <         */
5290 <        public static <K,V> ForkJoinTask<K> reduceKeys
5291 <            (ConcurrentHashMap<K,V> map,
5292 <             BiFunction<? super K, ? super K, ? extends K> reducer) {
5293 <            if (reducer == null) throw new NullPointerException();
5294 <            return new ReduceKeysTask<K,V>
5295 <                (map, null, -1, null, reducer);
5296 <        }
5297 <
5298 <        /**
5299 <         * Returns a task that when invoked, returns the result of
5300 <         * accumulating the given transformation of all keys using the given
5301 <         * reducer to combine values, or null if none.
5302 <         *
5303 <         * @param map the map
5304 <         * @param transformer a function returning the transformation
5305 <         * for an element, or null if there is no transformation (in
5306 <         * which case it is not combined)
5307 <         * @param reducer a commutative associative combining function
5308 <         * @return the task
5309 <         */
5310 <        public static <K,V,U> ForkJoinTask<U> reduceKeys
5311 <            (ConcurrentHashMap<K,V> map,
5312 <             Function<? super K, ? extends U> transformer,
5313 <             BiFunction<? super U, ? super U, ? extends U> reducer) {
5314 <            if (transformer == null || reducer == null)
5315 <                throw new NullPointerException();
5316 <            return new MapReduceKeysTask<K,V,U>
5317 <                (map, null, -1, null, transformer, reducer);
5318 <        }
5319 <
5320 <        /**
5321 <         * Returns a task that when invoked, returns the result of
5322 <         * accumulating the given transformation of all keys using the given
5323 <         * reducer to combine values, and the given basis as an
5324 <         * identity value.
5325 <         *
5326 <         * @param map the map
5327 <         * @param transformer a function returning the transformation
5328 <         * for an element
5329 <         * @param basis the identity (initial default value) for the reduction
5330 <         * @param reducer a commutative associative combining function
5331 <         * @return the task
5332 <         */
5333 <        public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5334 <            (ConcurrentHashMap<K,V> map,
5335 <             ToDoubleFunction<? super K> transformer,
5336 <             double basis,
5337 <             DoubleBinaryOperator reducer) {
5338 <            if (transformer == null || reducer == null)
5339 <                throw new NullPointerException();
5340 <            return new MapReduceKeysToDoubleTask<K,V>
5341 <                (map, null, -1, null, transformer, basis, reducer);
5342 <        }
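A sequential sketch of the basis requirement for the primitive reductions above: the basis seeds every sub-reduction, so it should be an identity for the reducer (0.0 for addition here), otherwise parallel splits would count it more than once. Class name and sample contents are illustrative assumptions.

import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleBinaryOperator;
import java.util.function.ToDoubleFunction;

class ReduceKeysToDoubleSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Double> m = new ConcurrentHashMap<>();
        m.put("a", 1.5); m.put("bc", 2.5);
        ToDoubleFunction<String> transformer = String::length;
        DoubleBinaryOperator reducer = Double::sum;
        double basis = 0.0;                    // identity for the sum reducer
        double r = basis;
        for (String k : m.keySet())
            r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
        System.out.println(r);                 // prints 3.0 (lengths 1 + 2)
    }
}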
5343 <
5344 <        /**
5345 <         * Returns a task that when invoked, returns the result of
5346 <         * accumulating the given transformation of all keys using the given
5347 <         * reducer to combine values, and the given basis as an
5348 <         * identity value.
5349 <         *
5350 <         * @param map the map
5351 <         * @param transformer a function returning the transformation
5352 <         * for an element
5353 <         * @param basis the identity (initial default value) for the reduction
5354 <         * @param reducer a commutative associative combining function
5355 <         * @return the task
5356 <         */
5357 <        public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5358 <            (ConcurrentHashMap<K,V> map,
5359 <             ToLongFunction<? super K> transformer,
5360 <             long basis,
5361 <             LongBinaryOperator reducer) {
5362 <            if (transformer == null || reducer == null)
5363 <                throw new NullPointerException();
5364 <            return new MapReduceKeysToLongTask<K,V>
5365 <                (map, null, -1, null, transformer, basis, reducer);
5366 <        }
5367 <
5368 <        /**
5369 <         * Returns a task that when invoked, returns the result of
5370 <         * accumulating the given transformation of all keys using the given
5371 <         * reducer to combine values, and the given basis as an
5372 <         * identity value.
5373 <         *
5374 <         * @param map the map
5375 <         * @param transformer a function returning the transformation
5376 <         * for an element
5377 <         * @param basis the identity (initial default value) for the reduction
5378 <         * @param reducer a commutative associative combining function
5379 <         * @return the task
5380 <         */
5381 <        public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5382 <            (ConcurrentHashMap<K,V> map,
5383 <             ToIntFunction<? super K> transformer,
5384 <             int basis,
5385 <             IntBinaryOperator reducer) {
5386 <            if (transformer == null || reducer == null)
5387 <                throw new NullPointerException();
5388 <            return new MapReduceKeysToIntTask<K,V>
5389 <                (map, null, -1, null, transformer, basis, reducer);
5390 <        }
5391 <
5392 <        /**
5393 <         * Returns a task that when invoked, performs the given action
5394 <         * for each value.
5395 <         *
5396 <         * @param map the map
5397 <         * @param action the action
5398 <         * @return the task
5399 <         */
5400 <        public static <K,V> ForkJoinTask<Void> forEachValue
5401 <            (ConcurrentHashMap<K,V> map,
5402 <             Consumer<? super V> action) {
5403 <            if (action == null) throw new NullPointerException();
5404 <            return new ForEachValueTask<K,V>(map, null, -1, action);
5405 <        }
5406 <
5407 <        /**
5408 <         * Returns a task that when invoked, performs the given action
5409 <         * for each non-null transformation of each value.
5410 <         *
5411 <         * @param map the map
5412 <         * @param transformer a function returning the transformation
5413 <         * for an element, or null if there is no transformation (in
5414 <         * which case the action is not applied)
5415 <         * @param action the action
5416 <         * @return the task
5417 <         */
5418 <        public static <K,V,U> ForkJoinTask<Void> forEachValue
5419 <            (ConcurrentHashMap<K,V> map,
5420 <             Function<? super V, ? extends U> transformer,
5421 <             Consumer<? super U> action) {
5422 <            if (transformer == null || action == null)
5423 <                throw new NullPointerException();
5424 <            return new ForEachTransformedValueTask<K,V,U>
5425 <                (map, null, -1, transformer, action);
5426 <        }
5427 <
5428 <        /**
5429 <         * Returns a task that when invoked, returns a non-null result
5430 <         * from applying the given search function on each value, or
5431 <         * null if none.  Upon success, further element processing is
5432 <         * suppressed and the results of any other parallel
5433 <         * invocations of the search function are ignored.
5434 <         *
5435 <         * @param map the map
5436 <         * @param searchFunction a function returning a non-null
5437 <         * result on success, else null
5438 <         * @return the task
5439 <         */
5440 <        public static <K,V,U> ForkJoinTask<U> searchValues
5441 <            (ConcurrentHashMap<K,V> map,
5442 <             Function<? super V, ? extends U> searchFunction) {
5443 <            if (searchFunction == null) throw new NullPointerException();
5444 <            return new SearchValuesTask<K,V,U>
5445 <                (map, null, -1, searchFunction,
5446 <                 new AtomicReference<U>());
5447 <        }
5448 <
5449 <        /**
5450 <         * Returns a task that when invoked, returns the result of
5451 <         * accumulating all values using the given reducer to combine
5452 <         * values, or null if none.
5453 <         *
5454 <         * @param map the map
5455 <         * @param reducer a commutative associative combining function
5456 <         * @return the task
5457 <         */
5458 <        public static <K,V> ForkJoinTask<V> reduceValues
5459 <            (ConcurrentHashMap<K,V> map,
5460 <             BiFunction<? super V, ? super V, ? extends V> reducer) {
5461 <            if (reducer == null) throw new NullPointerException();
5462 <            return new ReduceValuesTask<K,V>
5463 <                (map, null, -1, null, reducer);
5464 <        }
5465 <
5466 <        /**
5467 <         * Returns a task that when invoked, returns the result of
5468 <         * accumulating the given transformation of all values using the
5469 <         * given reducer to combine values, or null if none.
5470 <         *
5471 <         * @param map the map
5472 <         * @param transformer a function returning the transformation
5473 <         * for an element, or null if there is no transformation (in
5474 <         * which case it is not combined)
5475 <         * @param reducer a commutative associative combining function
5476 <         * @return the task
5477 <         */
5478 <        public static <K,V,U> ForkJoinTask<U> reduceValues
5479 <            (ConcurrentHashMap<K,V> map,
5480 <             Function<? super V, ? extends U> transformer,
5481 <             BiFunction<? super U, ? super U, ? extends U> reducer) {
5482 <            if (transformer == null || reducer == null)
5483 <                throw new NullPointerException();
5484 <            return new MapReduceValuesTask<K,V,U>
5485 <                (map, null, -1, null, transformer, reducer);
5486 <        }
5487 <
5488 <        /**
5489 <         * Returns a task that when invoked, returns the result of
5490 <         * accumulating the given transformation of all values using the
5491 <         * given reducer to combine values, and the given basis as an
5492 <         * identity value.
5493 <         *
5494 <         * @param map the map
5495 <         * @param transformer a function returning the transformation
5496 <         * for an element
5497 <         * @param basis the identity (initial default value) for the reduction
5498 <         * @param reducer a commutative associative combining function
5499 <         * @return the task
5500 <         */
5501 <        public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5502 <            (ConcurrentHashMap<K,V> map,
5503 <             ToDoubleFunction<? super V> transformer,
5504 <             double basis,
5505 <             DoubleBinaryOperator reducer) {
5506 <            if (transformer == null || reducer == null)
5507 <                throw new NullPointerException();
5508 <            return new MapReduceValuesToDoubleTask<K,V>
5509 <                (map, null, -1, null, transformer, basis, reducer);
4311 >        public final int hashCode() {
4312 >            int h = 0;
4313 >            Node<K,V>[] t;
4314 >            if ((t = map.table) != null) {
4315 >                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
4316 >                for (Node<K,V> p; (p = it.advance()) != null; ) {
4317 >                    h += p.hashCode();
4318 >                }
4319 >            }
4320 >            return h;
4321          }
4322  
4323 <        /**
4324 <         * Returns a task that when invoked, returns the result of
4325 <         * accumulating the given transformation of all values using the
4326 <         * given reducer to combine values, and the given basis as an
4327 <         * identity value.
5517 <         *
5518 <         * @param map the map
5519 <         * @param transformer a function returning the transformation
5520 <         * for an element
5521 <         * @param basis the identity (initial default value) for the reduction
5522 <         * @param reducer a commutative associative combining function
5523 <         * @return the task
5524 <         */
5525 <        public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5526 <            (ConcurrentHashMap<K,V> map,
5527 <             ToLongFunction<? super V> transformer,
5528 <             long basis,
5529 <             LongBinaryOperator reducer) {
5530 <            if (transformer == null || reducer == null)
5531 <                throw new NullPointerException();
5532 <            return new MapReduceValuesToLongTask<K,V>
5533 <                (map, null, -1, null, transformer, basis, reducer);
4323 >        public final boolean equals(Object o) {
4324 >            Set<?> c;
4325 >            return ((o instanceof Set) &&
4326 >                    ((c = (Set<?>)o) == this ||
4327 >                     (containsAll(c) && c.containsAll(this))));
4328          }
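A small usage sketch of the Set-contract behavior implemented above: two maps holding the same mappings have entry sets that agree on equals() (checked in both directions) and on hashCode(). The class name and sample maps are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class EntrySetContractSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> chm = new ConcurrentHashMap<>();
        Map<String,Integer> hm = new HashMap<>();
        chm.put("a", 1); chm.put("b", 2);
        hm.put("a", 1);  hm.put("b", 2);
        System.out.println(chm.entrySet().equals(hm.entrySet()));   // true
        System.out.println(chm.entrySet().hashCode() ==
                           hm.entrySet().hashCode());               // true
    }
}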
4329  
4330 <        /**
4331 <         * Returns a task that when invoked, returns the result of
4332 <         * accumulating the given transformation of all values using the
4333 <         * given reducer to combine values, and the given basis as an
4334 <         * identity value.
4335 <         *
5542 <         * @param map the map
5543 <         * @param transformer a function returning the transformation
5544 <         * for an element
5545 <         * @param basis the identity (initial default value) for the reduction
5546 <         * @param reducer a commutative associative combining function
5547 <         * @return the task
5548 <         */
5549 <        public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5550 <            (ConcurrentHashMap<K,V> map,
5551 <             ToIntFunction<? super V> transformer,
5552 <             int basis,
5553 <             IntBinaryOperator reducer) {
5554 <            if (transformer == null || reducer == null)
5555 <                throw new NullPointerException();
5556 <            return new MapReduceValuesToIntTask<K,V>
5557 <                (map, null, -1, null, transformer, basis, reducer);
4330 >        public Spliterator<Map.Entry<K,V>> spliterator() {
4331 >            Node<K,V>[] t;
4332 >            ConcurrentHashMap<K,V> m = map;
4333 >            long n = m.sumCount();
4334 >            int f = (t = m.table) == null ? 0 : t.length;
4335 >            return new EntrySpliterator<K,V>(t, f, 0, f, n < 0L ? 0L : n, m);
4336          }
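A hedged usage sketch for the entry-set spliterator above, feeding it into a (possibly parallel) stream through StreamSupport; the sample map is an assumption for illustration.

import java.util.Map;
import java.util.Spliterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.StreamSupport;

class EntrySpliteratorSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> m = new ConcurrentHashMap<>();
        m.put("x", 1); m.put("y", 2);
        Spliterator<Map.Entry<String,Integer>> sp = m.entrySet().spliterator();
        int total = StreamSupport.stream(sp, true)   // true => parallel stream
                                 .mapToInt(e -> e.getValue())
                                 .sum();
        System.out.println(total);                   // prints 3
    }
}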
4337  
4338 <        /**
5561 <         * Returns a task that when invoked, performs the given action
5562 <         * for each entry.
5563 <         *
5564 <         * @param map the map
5565 <         * @param action the action
5566 <         * @return the task
5567 <         */
5568 <        public static <K,V> ForkJoinTask<Void> forEachEntry
5569 <            (ConcurrentHashMap<K,V> map,
5570 <             Consumer<? super Map.Entry<K,V>> action) {
4338 >        public void forEach(Consumer<? super Map.Entry<K,V>> action) {
4339              if (action == null) throw new NullPointerException();
4340 <            return new ForEachEntryTask<K,V>(map, null, -1, action);
4341 <        }
4342 <
4343 <        /**
4344 <         * Returns a task that when invoked, performs the given action
4345 <         * for each non-null transformation of each entry.
5578 <         *
5579 <         * @param map the map
5580 <         * @param transformer a function returning the transformation
5581 <         * for an element, or null if there is no transformation (in
5582 <         * which case the action is not applied)
5583 <         * @param action the action
5584 <         * @return the task
5585 <         */
5586 <        public static <K,V,U> ForkJoinTask<Void> forEachEntry
5587 <            (ConcurrentHashMap<K,V> map,
5588 <             Function<Map.Entry<K,V>, ? extends U> transformer,
5589 <             Consumer<? super U> action) {
5590 <            if (transformer == null || action == null)
5591 <                throw new NullPointerException();
5592 <            return new ForEachTransformedEntryTask<K,V,U>
5593 <                (map, null, -1, transformer, action);
5594 <        }
5595 <
5596 <        /**
5597 <         * Returns a task that when invoked, returns a non-null result
5598 <         * from applying the given search function on each entry, or
5599 <         * null if none.  Upon success, further element processing is
5600 <         * suppressed and the results of any other parallel
5601 <         * invocations of the search function are ignored.
5602 <         *
5603 <         * @param map the map
5604 <         * @param searchFunction a function returning a non-null
5605 <         * result on success, else null
5606 <         * @return the task
5607 <         */
5608 <        public static <K,V,U> ForkJoinTask<U> searchEntries
5609 <            (ConcurrentHashMap<K,V> map,
5610 <             Function<Map.Entry<K,V>, ? extends U> searchFunction) {
5611 <            if (searchFunction == null) throw new NullPointerException();
5612 <            return new SearchEntriesTask<K,V,U>
5613 <                (map, null, -1, searchFunction,
5614 <                 new AtomicReference<U>());
5615 <        }
5616 <
5617 <        /**
5618 <         * Returns a task that when invoked, returns the result of
5619 <         * accumulating all entries using the given reducer to combine
5620 <         * values, or null if none.
5621 <         *
5622 <         * @param map the map
5623 <         * @param reducer a commutative associative combining function
5624 <         * @return the task
5625 <         */
5626 <        public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5627 <            (ConcurrentHashMap<K,V> map,
5628 <             BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5629 <            if (reducer == null) throw new NullPointerException();
5630 <            return new ReduceEntriesTask<K,V>
5631 <                (map, null, -1, null, reducer);
4340 >            Node<K,V>[] t;
4341 >            if ((t = map.table) != null) {
4342 >                Traverser<K,V> it = new Traverser<K,V>(t, t.length, 0, t.length);
4343 >                for (Node<K,V> p; (p = it.advance()) != null; )
4344 >                    action.accept(new MapEntry<K,V>((K)p.key, p.val, map));
4345 >            }
4346          }
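A small usage sketch of the entry-set forEach above; the sample map and output format are illustrative assumptions.

import java.util.concurrent.ConcurrentHashMap;

class EntrySetForEachSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String,Integer> m = new ConcurrentHashMap<>();
        m.put("k1", 10); m.put("k2", 20);
        // Each entry is presented to the action as a Map.Entry.
        m.entrySet().forEach(e ->
            System.out.println(e.getKey() + "=" + e.getValue()));
    }
}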
4347  
4348 <        /**
5635 <         * Returns a task that when invoked, returns the result of
5636 <         * accumulating the given transformation of all entries using the
5637 <         * given reducer to combine values, or null if none.
5638 <         *
5639 <         * @param map the map
5640 <         * @param transformer a function returning the transformation
5641 <         * for an element, or null if there is no transformation (in
5642 <         * which case it is not combined)
5643 <         * @param reducer a commutative associative combining function
5644 <         * @return the task
5645 <         */
5646 <        public static <K,V,U> ForkJoinTask<U> reduceEntries
5647 <            (ConcurrentHashMap<K,V> map,
5648 <             Function<Map.Entry<K,V>, ? extends U> transformer,
5649 <             BiFunction<? super U, ? super U, ? extends U> reducer) {
5650 <            if (transformer == null || reducer == null)
5651 <                throw new NullPointerException();
5652 <            return new MapReduceEntriesTask<K,V,U>
5653 <                (map, null, -1, null, transformer, reducer);
5654 <        }
4348 >    }
4349  
4350 <        /**
5657 <         * Returns a task that when invoked, returns the result of
5658 <         * accumulating the given transformation of all entries using the
5659 <         * given reducer to combine values, and the given basis as an
5660 <         * identity value.
5661 <         *
5662 <         * @param map the map
5663 <         * @param transformer a function returning the transformation
5664 <         * for an element
5665 <         * @param basis the identity (initial default value) for the reduction
5666 <         * @param reducer a commutative associative combining function
5667 <         * @return the task
5668 <         */
5669 <        public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5670 <            (ConcurrentHashMap<K,V> map,
5671 <             ToDoubleFunction<Map.Entry<K,V>> transformer,
5672 <             double basis,
5673 <             DoubleBinaryOperator reducer) {
5674 <            if (transformer == null || reducer == null)
5675 <                throw new NullPointerException();
5676 <            return new MapReduceEntriesToDoubleTask<K,V>
5677 <                (map, null, -1, null, transformer, basis, reducer);
5678 <        }
4350 >    // -------------------------------------------------------
4351  
4352 <        /**
4353 <         * Returns a task that when invoked, returns the result of
4354 <         * accumulating the given transformation of all entries using the
4355 <         * given reducer to combine values, and the given basis as an
4356 <         * identity value.
4357 <         *
4358 <         * @param map the map
4359 <         * @param transformer a function returning the transformation
4360 <         * for an element
4361 <         * @param basis the identity (initial default value) for the reduction
4362 <         * @param reducer a commutative associative combining function
4363 <         * @return the task
4364 <         */
4365 <        public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
4366 <            (ConcurrentHashMap<K,V> map,
4367 <             ToLongFunction<Map.Entry<K,V>> transformer,
4368 <             long basis,
4369 <             LongBinaryOperator reducer) {
4370 <            if (transformer == null || reducer == null)
4371 <                throw new NullPointerException();
4372 <            return new MapReduceEntriesToLongTask<K,V>
4373 <                (map, null, -1, null, transformer, basis, reducer);
4352 >    /**
4353 >     * Base class for bulk tasks. Repeats some fields and code from
4354 >     * class Traverser, because we need to subclass CountedCompleter.
4355 >     */
4356 >    static abstract class BulkTask<K,V,R> extends CountedCompleter<R> {
4357 >        Node<K,V>[] tab;        // same as Traverser
4358 >        Node<K,V> next;
4359 >        int index;
4360 >        int baseIndex;
4361 >        int baseLimit;
4362 >        final int baseSize;
4363 >        int batch;              // split control
4364 >
4365 >        BulkTask(BulkTask<K,V,?> par, int b, int i, int f, Node<K,V>[] t) {
4366 >            super(par);
4367 >            this.batch = b;
4368 >            this.index = this.baseIndex = i;
4369 >            if ((this.tab = t) == null)
4370 >                this.baseSize = this.baseLimit = 0;
4371 >            else if (par == null)
4372 >                this.baseSize = this.baseLimit = t.length;
4373 >            else {
4374 >                this.baseLimit = f;
4375 >                this.baseSize = par.baseSize;
4376 >            }
4377          }
4378  
4379          /**
4380 <         * Returns a task that when invoked, returns the result of
5706 <         * accumulating the given transformation of all entries using the
5707 <         * given reducer to combine values, and the given basis as an
5708 <         * identity value.
5709 <         *
5710 <         * @param map the map
5711 <         * @param transformer a function returning the transformation
5712 <         * for an element
5713 <         * @param basis the identity (initial default value) for the reduction
5714 <         * @param reducer a commutative associative combining function
5715 <         * @return the task
4380 >         * Same as Traverser version
4381           */
4382 <        public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
4383 <            (ConcurrentHashMap<K,V> map,
4384 <             ToIntFunction<Map.Entry<K,V>> transformer,
4385 <             int basis,
4386 <             IntBinaryOperator reducer) {
4387 <            if (transformer == null || reducer == null)
4388 <                throw new NullPointerException();
4389 <            return new MapReduceEntriesToIntTask<K,V>
4390 <                (map, null, -1, null, transformer, basis, reducer);
4382 >        final Node<K,V> advance() {
4383 >            Node<K,V> e;
4384 >            if ((e = next) != null)
4385 >                e = e.next;
4386 >            for (;;) {
4387 >                Node<K,V>[] t; int i, n; Object ek;
4388 >                if (e != null)
4389 >                    return next = e;
4390 >                if (baseIndex >= baseLimit || (t = tab) == null ||
4391 >                    (n = t.length) <= (i = index) || i < 0)
4392 >                    return next = null;
4393 >                if ((e = tabAt(t, index)) != null && e.hash < 0) {
4394 >                    if ((ek = e.key) instanceof TreeBin)
4395 >                        e = ((TreeBin<K,V>)ek).first;
4396 >                    else {
4397 >                        tab = (Node<K,V>[])ek;
4398 >                        e = null;
4399 >                        continue;
4400 >                    }
4401 >                }
4402 >                if ((index += baseSize) >= n)
4403 >                    index = ++baseIndex;
4404 >            }
4405          }
4406      }
4407  
5729    // -------------------------------------------------------
5730
4408      /*
4409       * Task classes. Coded in a regular but ugly format/style to
4410       * simplify checks that each variant differs in the right way from
# Line 5736 | Line 4413 | public class ConcurrentHashMap<K,V>
4413       * simplest hoisted bypass to help avoid convoluted traps.
4414       */
4415  
4416 <    @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
4417 <        extends Traverser<K,V,Void> {
4416 >    static final class ForEachKeyTask<K,V>
4417 >        extends BulkTask<K,V,Void> {
4418          final Consumer<? super K> action;
4419          ForEachKeyTask
4420 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4420 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4421               Consumer<? super K> action) {
4422 <            super(m, p, b);
4422 >            super(p, b, i, f, t);
4423              this.action = action;
4424          }
4425          public final void compute() {
4426              final Consumer<? super K> action;
4427              if ((action = this.action) != null) {
4428 <                for (int b; (b = preSplit()) > 0;)
4429 <                    new ForEachKeyTask<K,V>(map, this, b, action).fork();
4430 <                forEachKey(action);
4428 >                for (int i = baseIndex, f, h; batch > 0 &&
4429 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4430 >                    addToPendingCount(1);
4431 >                    new ForEachKeyTask<K,V>
4432 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4433 >                         action).fork();
4434 >                }
4435 >                for (Node<K,V> p; (p = advance()) != null;)
4436 >                    action.accept((K)p.key);
4437                  propagateCompletion();
4438              }
4439          }
4440      }
4441  
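The compute() loop above repeatedly halves the remaining [baseIndex, baseLimit) range, forks a sibling task for the upper half, keeps the lower half for itself, then processes its leaf range and propagates completion. A simplified standalone sketch of that pattern over an int array (omitting the batch bound the real tasks use); all names and data below are hypothetical.

import java.util.concurrent.CountedCompleter;
import java.util.concurrent.ForkJoinPool;
import java.util.function.IntConsumer;

class HalvingForEachSketch extends CountedCompleter<Void> {
    final int[] data; int lo, hi; final IntConsumer action;
    HalvingForEachSketch(HalvingForEachSketch parent, int[] data,
                         int lo, int hi, IntConsumer action) {
        super(parent);
        this.data = data; this.lo = lo; this.hi = hi; this.action = action;
    }
    public void compute() {
        // While the range can still be halved, register one pending child and
        // fork it for the upper half, keeping the lower half locally.
        for (int h; (h = (lo + hi) >>> 1) > lo; ) {
            addToPendingCount(1);
            new HalvingForEachSketch(this, data, h, hi, action).fork();
            hi = h;
        }
        for (int i = lo; i < hi; ++i)      // process the retained leaf range
            action.accept(data[i]);
        propagateCompletion();             // counts this task as done
    }
    public static void main(String[] args) {
        int[] a = {1, 2, 3, 4, 5, 6, 7, 8};
        ForkJoinPool.commonPool().invoke(
            new HalvingForEachSketch(null, a, 0, a.length,
                                     x -> System.out.println(x)));
        // prints 1..8, in no particular order
    }
}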
4442 <    @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
4443 <        extends Traverser<K,V,Void> {
4442 >    static final class ForEachValueTask<K,V>
4443 >        extends BulkTask<K,V,Void> {
4444          final Consumer<? super V> action;
4445          ForEachValueTask
4446 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4446 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4447               Consumer<? super V> action) {
4448 <            super(m, p, b);
4448 >            super(p, b, i, f, t);
4449              this.action = action;
4450          }
4451          public final void compute() {
4452              final Consumer<? super V> action;
4453              if ((action = this.action) != null) {
4454 <                for (int b; (b = preSplit()) > 0;)
4455 <                    new ForEachValueTask<K,V>(map, this, b, action).fork();
4456 <                forEachValue(action);
4454 >                for (int i = baseIndex, f, h; batch > 0 &&
4455 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4456 >                    addToPendingCount(1);
4457 >                    new ForEachValueTask<K,V>
4458 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4459 >                         action).fork();
4460 >                }
4461 >                for (Node<K,V> p; (p = advance()) != null;)
4462 >                    action.accept(p.val);
4463                  propagateCompletion();
4464              }
4465          }
4466      }
4467  
4468 <    @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
4469 <        extends Traverser<K,V,Void> {
4468 >    static final class ForEachEntryTask<K,V>
4469 >        extends BulkTask<K,V,Void> {
4470          final Consumer<? super Entry<K,V>> action;
4471          ForEachEntryTask
4472 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4472 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4473               Consumer<? super Entry<K,V>> action) {
4474 <            super(m, p, b);
4474 >            super(p, b, i, f, t);
4475              this.action = action;
4476          }
4477          public final void compute() {
4478              final Consumer<? super Entry<K,V>> action;
4479              if ((action = this.action) != null) {
4480 <                for (int b; (b = preSplit()) > 0;)
4481 <                    new ForEachEntryTask<K,V>(map, this, b, action).fork();
4482 <                V v;
4483 <                while ((v = advanceValue()) != null)
4484 <                    action.accept(entryFor(nextKey, v));
4480 >                for (int i = baseIndex, f, h; batch > 0 &&
4481 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4482 >                    addToPendingCount(1);
4483 >                    new ForEachEntryTask<K,V>
4484 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4485 >                         action).fork();
4486 >                }
4487 >                for (Node<K,V> p; (p = advance()) != null; )
4488 >                    action.accept(p);
4489                  propagateCompletion();
4490              }
4491          }
4492      }
4493  
4494 <    @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
4495 <        extends Traverser<K,V,Void> {
4494 >    static final class ForEachMappingTask<K,V>
4495 >        extends BulkTask<K,V,Void> {
4496          final BiConsumer<? super K, ? super V> action;
4497          ForEachMappingTask
4498 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4498 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4499               BiConsumer<? super K,? super V> action) {
4500 <            super(m, p, b);
4500 >            super(p, b, i, f, t);
4501              this.action = action;
4502          }
4503          public final void compute() {
4504              final BiConsumer<? super K, ? super V> action;
4505              if ((action = this.action) != null) {
4506 <                for (int b; (b = preSplit()) > 0;)
4507 <                    new ForEachMappingTask<K,V>(map, this, b, action).fork();
4508 <                V v;
4509 <                while ((v = advanceValue()) != null)
4510 <                    action.accept(nextKey, v);
4506 >                for (int i = baseIndex, f, h; batch > 0 &&
4507 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4508 >                    addToPendingCount(1);
4509 >                    new ForEachMappingTask<K,V>
4510 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4511 >                         action).fork();
4512 >                }
4513 >                for (Node<K,V> p; (p = advance()) != null; )
4514 >                    action.accept((K)p.key, p.val);
4515                  propagateCompletion();
4516              }
4517          }
4518      }
4519  
4520 <    @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
4521 <        extends Traverser<K,V,Void> {
4520 >    static final class ForEachTransformedKeyTask<K,V,U>
4521 >        extends BulkTask<K,V,Void> {
4522          final Function<? super K, ? extends U> transformer;
4523          final Consumer<? super U> action;
4524          ForEachTransformedKeyTask
4525 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4525 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4526               Function<? super K, ? extends U> transformer, Consumer<? super U> action) {
4527 <            super(m, p, b);
4527 >            super(p, b, i, f, t);
4528              this.transformer = transformer; this.action = action;
4529          }
4530          public final void compute() {
# Line 5835 | Line 4532 | public class ConcurrentHashMap<K,V>
4532              final Consumer<? super U> action;
4533              if ((transformer = this.transformer) != null &&
4534                  (action = this.action) != null) {
4535 <                for (int b; (b = preSplit()) > 0;)
4535 >                for (int i = baseIndex, f, h; batch > 0 &&
4536 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4537 >                    addToPendingCount(1);
4538                      new ForEachTransformedKeyTask<K,V,U>
4539 <                        (map, this, b, transformer, action).fork();
4540 <                K k; U u;
4541 <                while ((k = advanceKey()) != null) {
4542 <                    if ((u = transformer.apply(k)) != null)
4539 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4540 >                         transformer, action).fork();
4541 >                }
4542 >                for (Node<K,V> p; (p = advance()) != null; ) {
4543 >                    U u;
4544 >                    if ((u = transformer.apply((K)p.key)) != null)
4545                          action.accept(u);
4546                  }
4547                  propagateCompletion();
# Line 5848 | Line 4549 | public class ConcurrentHashMap<K,V>
4549          }
4550      }
4551  
4552 <    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
4553 <        extends Traverser<K,V,Void> {
4552 >    static final class ForEachTransformedValueTask<K,V,U>
4553 >        extends BulkTask<K,V,Void> {
4554          final Function<? super V, ? extends U> transformer;
4555          final Consumer<? super U> action;
4556          ForEachTransformedValueTask
4557 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4557 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4558               Function<? super V, ? extends U> transformer, Consumer<? super U> action) {
4559 <            super(m, p, b);
4559 >            super(p, b, i, f, t);
4560              this.transformer = transformer; this.action = action;
4561          }
4562          public final void compute() {
# Line 5863 | Line 4564 | public class ConcurrentHashMap<K,V>
4564              final Consumer<? super U> action;
4565              if ((transformer = this.transformer) != null &&
4566                  (action = this.action) != null) {
4567 <                for (int b; (b = preSplit()) > 0;)
4567 >                for (int i = baseIndex, f, h; batch > 0 &&
4568 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4569 >                    addToPendingCount(1);
4570                      new ForEachTransformedValueTask<K,V,U>
4571 <                        (map, this, b, transformer, action).fork();
4572 <                V v; U u;
4573 <                while ((v = advanceValue()) != null) {
4574 <                    if ((u = transformer.apply(v)) != null)
4571 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4572 >                         transformer, action).fork();
4573 >                }
4574 >                for (Node<K,V> p; (p = advance()) != null; ) {
4575 >                    U u;
4576 >                    if ((u = transformer.apply(p.val)) != null)
4577                          action.accept(u);
4578                  }
4579                  propagateCompletion();
# Line 5876 | Line 4581 | public class ConcurrentHashMap<K,V>
4581          }
4582      }
4583  
4584 <    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
4585 <        extends Traverser<K,V,Void> {
4584 >    static final class ForEachTransformedEntryTask<K,V,U>
4585 >        extends BulkTask<K,V,Void> {
4586          final Function<Map.Entry<K,V>, ? extends U> transformer;
4587          final Consumer<? super U> action;
4588          ForEachTransformedEntryTask
4589 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4589 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4590               Function<Map.Entry<K,V>, ? extends U> transformer, Consumer<? super U> action) {
4591 <            super(m, p, b);
4591 >            super(p, b, i, f, t);
4592              this.transformer = transformer; this.action = action;
4593          }
4594          public final void compute() {
# Line 5891 | Line 4596 | public class ConcurrentHashMap<K,V>
4596              final Consumer<? super U> action;
4597              if ((transformer = this.transformer) != null &&
4598                  (action = this.action) != null) {
4599 <                for (int b; (b = preSplit()) > 0;)
4599 >                for (int i = baseIndex, f, h; batch > 0 &&
4600 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4601 >                    addToPendingCount(1);
4602                      new ForEachTransformedEntryTask<K,V,U>
4603 <                        (map, this, b, transformer, action).fork();
4604 <                V v; U u;
4605 <                while ((v = advanceValue()) != null) {
4606 <                    if ((u = transformer.apply(entryFor(nextKey,
4607 <                                                        v))) != null)
4603 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4604 >                         transformer, action).fork();
4605 >                }
4606 >                for (Node<K,V> p; (p = advance()) != null; ) {
4607 >                    U u;
4608 >                    if ((u = transformer.apply(p)) != null)
4609                          action.accept(u);
4610                  }
4611                  propagateCompletion();
# Line 5905 | Line 4613 | public class ConcurrentHashMap<K,V>
4613          }
4614      }
4615  
4616 <    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
4617 <        extends Traverser<K,V,Void> {
4616 >    static final class ForEachTransformedMappingTask<K,V,U>
4617 >        extends BulkTask<K,V,Void> {
4618          final BiFunction<? super K, ? super V, ? extends U> transformer;
4619          final Consumer<? super U> action;
4620          ForEachTransformedMappingTask
4621 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4621 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4622               BiFunction<? super K, ? super V, ? extends U> transformer,
4623               Consumer<? super U> action) {
4624 <            super(m, p, b);
4624 >            super(p, b, i, f, t);
4625              this.transformer = transformer; this.action = action;
4626          }
4627          public final void compute() {
# Line 5921 | Line 4629 | public class ConcurrentHashMap<K,V>
4629              final Consumer<? super U> action;
4630              if ((transformer = this.transformer) != null &&
4631                  (action = this.action) != null) {
4632 <                for (int b; (b = preSplit()) > 0;)
4632 >                for (int i = baseIndex, f, h; batch > 0 &&
4633 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4634 >                    addToPendingCount(1);
4635                      new ForEachTransformedMappingTask<K,V,U>
4636 <                        (map, this, b, transformer, action).fork();
4637 <                V v; U u;
4638 <                while ((v = advanceValue()) != null) {
4639 <                    if ((u = transformer.apply(nextKey, v)) != null)
4636 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4637 >                         transformer, action).fork();
4638 >                }
4639 >                for (Node<K,V> p; (p = advance()) != null; ) {
4640 >                    U u;
4641 >                    if ((u = transformer.apply((K)p.key, p.val)) != null)
4642                          action.accept(u);
4643                  }
4644                  propagateCompletion();
# Line 5934 | Line 4646 | public class ConcurrentHashMap<K,V>
4646          }
4647      }
4648  
4649 <    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
4650 <        extends Traverser<K,V,U> {
4649 >    static final class SearchKeysTask<K,V,U>
4650 >        extends BulkTask<K,V,U> {
4651          final Function<? super K, ? extends U> searchFunction;
4652          final AtomicReference<U> result;
4653          SearchKeysTask
4654 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4654 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4655               Function<? super K, ? extends U> searchFunction,
4656               AtomicReference<U> result) {
4657 <            super(m, p, b);
4657 >            super(p, b, i, f, t);
4658              this.searchFunction = searchFunction; this.result = result;
4659          }
4660          public final U getRawResult() { return result.get(); }
# Line 5951 | Line 4663 | public class ConcurrentHashMap<K,V>
4663              final AtomicReference<U> result;
4664              if ((searchFunction = this.searchFunction) != null &&
4665                  (result = this.result) != null) {
4666 <                for (int b;;) {
4666 >                for (int i = baseIndex, f, h; batch > 0 &&
4667 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4668                      if (result.get() != null)
4669                          return;
4670 <                    if ((b = preSplit()) <= 0)
5958 <                        break;
4670 >                    addToPendingCount(1);
4671                      new SearchKeysTask<K,V,U>
4672 <                        (map, this, b, searchFunction, result).fork();
4672 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4673 >                         searchFunction, result).fork();
4674                  }
4675                  while (result.get() == null) {
4676 <                    K k; U u;
4677 <                    if ((k = advanceKey()) == null) {
4676 >                    U u;
4677 >                    Node<K,V> p;
4678 >                    if ((p = advance()) == null) {
4679                          propagateCompletion();
4680                          break;
4681                      }
4682 <                    if ((u = searchFunction.apply(k)) != null) {
4682 >                    if ((u = searchFunction.apply((K)p.key)) != null) {
4683                          if (result.compareAndSet(null, u))
4684                              quietlyCompleteRoot();
4685                          break;
# Line 5975 | Line 4689 | public class ConcurrentHashMap<K,V>
4689          }
4690      }
4691  
4692 <    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
4693 <        extends Traverser<K,V,U> {
4692 >    static final class SearchValuesTask<K,V,U>
4693 >        extends BulkTask<K,V,U> {
4694          final Function<? super V, ? extends U> searchFunction;
4695          final AtomicReference<U> result;
4696          SearchValuesTask
4697 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4697 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4698               Function<? super V, ? extends U> searchFunction,
4699               AtomicReference<U> result) {
4700 <            super(m, p, b);
4700 >            super(p, b, i, f, t);
4701              this.searchFunction = searchFunction; this.result = result;
4702          }
4703          public final U getRawResult() { return result.get(); }
# Line 5992 | Line 4706 | public class ConcurrentHashMap<K,V>
4706              final AtomicReference<U> result;
4707              if ((searchFunction = this.searchFunction) != null &&
4708                  (result = this.result) != null) {
4709 <                for (int b;;) {
4709 >                for (int i = baseIndex, f, h; batch > 0 &&
4710 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4711                      if (result.get() != null)
4712                          return;
4713 <                    if ((b = preSplit()) <= 0)
5999 <                        break;
4713 >                    addToPendingCount(1);
4714                      new SearchValuesTask<K,V,U>
4715 <                        (map, this, b, searchFunction, result).fork();
4715 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4716 >                         searchFunction, result).fork();
4717                  }
4718                  while (result.get() == null) {
4719 <                    V v; U u;
4720 <                    if ((v = advanceValue()) == null) {
4719 >                    U u;
4720 >                    Node<K,V> p;
4721 >                    if ((p = advance()) == null) {
4722                          propagateCompletion();
4723                          break;
4724                      }
4725 <                    if ((u = searchFunction.apply(v)) != null) {
4725 >                    if ((u = searchFunction.apply(p.val)) != null) {
4726                          if (result.compareAndSet(null, u))
4727                              quietlyCompleteRoot();
4728                          break;
# Line 6016 | Line 4732 | public class ConcurrentHashMap<K,V>
4732          }
4733      }
4734  
4735 <    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
4736 <        extends Traverser<K,V,U> {
4735 >    static final class SearchEntriesTask<K,V,U>
4736 >        extends BulkTask<K,V,U> {
4737          final Function<Entry<K,V>, ? extends U> searchFunction;
4738          final AtomicReference<U> result;
4739          SearchEntriesTask
4740 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4740 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4741               Function<Entry<K,V>, ? extends U> searchFunction,
4742               AtomicReference<U> result) {
4743 <            super(m, p, b);
4743 >            super(p, b, i, f, t);
4744              this.searchFunction = searchFunction; this.result = result;
4745          }
4746          public final U getRawResult() { return result.get(); }
# Line 6033 | Line 4749 | public class ConcurrentHashMap<K,V>
4749              final AtomicReference<U> result;
4750              if ((searchFunction = this.searchFunction) != null &&
4751                  (result = this.result) != null) {
4752 <                for (int b;;) {
4752 >                for (int i = baseIndex, f, h; batch > 0 &&
4753 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4754                      if (result.get() != null)
4755                          return;
4756 <                    if ((b = preSplit()) <= 0)
6040 <                        break;
4756 >                    addToPendingCount(1);
4757                      new SearchEntriesTask<K,V,U>
4758 <                        (map, this, b, searchFunction, result).fork();
4758 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4759 >                         searchFunction, result).fork();
4760                  }
4761                  while (result.get() == null) {
4762 <                    V v; U u;
4763 <                    if ((v = advanceValue()) == null) {
4762 >                    U u;
4763 >                    Node<K,V> p;
4764 >                    if ((p = advance()) == null) {
4765                          propagateCompletion();
4766                          break;
4767                      }
4768 <                    if ((u = searchFunction.apply(entryFor(nextKey,
6051 <                                                           v))) != null) {
4768 >                    if ((u = searchFunction.apply(p)) != null) {
4769                          if (result.compareAndSet(null, u))
4770                              quietlyCompleteRoot();
4771                          return;
# Line 6058 | Line 4775 | public class ConcurrentHashMap<K,V>
4775          }
4776      }
4777  
4778 <    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
4779 <        extends Traverser<K,V,U> {
4778 >    static final class SearchMappingsTask<K,V,U>
4779 >        extends BulkTask<K,V,U> {
4780          final BiFunction<? super K, ? super V, ? extends U> searchFunction;
4781          final AtomicReference<U> result;
4782          SearchMappingsTask
4783 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4783 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4784               BiFunction<? super K, ? super V, ? extends U> searchFunction,
4785               AtomicReference<U> result) {
4786 <            super(m, p, b);
4786 >            super(p, b, i, f, t);
4787              this.searchFunction = searchFunction; this.result = result;
4788          }
4789          public final U getRawResult() { return result.get(); }
# Line 6075 | Line 4792 | public class ConcurrentHashMap<K,V>
4792              final AtomicReference<U> result;
4793              if ((searchFunction = this.searchFunction) != null &&
4794                  (result = this.result) != null) {
4795 <                for (int b;;) {
4795 >                for (int i = baseIndex, f, h; batch > 0 &&
4796 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4797                      if (result.get() != null)
4798                          return;
4799 <                    if ((b = preSplit()) <= 0)
6082 <                        break;
4799 >                    addToPendingCount(1);
4800                      new SearchMappingsTask<K,V,U>
4801 <                        (map, this, b, searchFunction, result).fork();
4801 >                        (this, batch >>>= 1, baseLimit = h, f, tab,
4802 >                         searchFunction, result).fork();
4803                  }
4804                  while (result.get() == null) {
4805 <                    V v; U u;
4806 <                    if ((v = advanceValue()) == null) {
4805 >                    U u;
4806 >                    Node<K,V> p;
4807 >                    if ((p = advance()) == null) {
4808                          propagateCompletion();
4809                          break;
4810                      }
4811 <                    if ((u = searchFunction.apply(nextKey, v)) != null) {
4811 >                    if ((u = searchFunction.apply((K)p.key, p.val)) != null) {
4812                          if (result.compareAndSet(null, u))
4813                              quietlyCompleteRoot();
4814                          break;
# Line 6099 | Line 4818 | public class ConcurrentHashMap<K,V>
4818          }
4819      }
4820  
4821 <    @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
4822 <        extends Traverser<K,V,K> {
4821 >    static final class ReduceKeysTask<K,V>
4822 >        extends BulkTask<K,V,K> {
4823          final BiFunction<? super K, ? super K, ? extends K> reducer;
4824          K result;
4825          ReduceKeysTask<K,V> rights, nextRight;
4826          ReduceKeysTask
4827 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4827 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4828               ReduceKeysTask<K,V> nextRight,
4829               BiFunction<? super K, ? super K, ? extends K> reducer) {
4830 <            super(m, p, b); this.nextRight = nextRight;
4830 >            super(p, b, i, f, t); this.nextRight = nextRight;
4831              this.reducer = reducer;
4832          }
4833          public final K getRawResult() { return result; }
4834 <        @SuppressWarnings("unchecked") public final void compute() {
4834 >        public final void compute() {
4835              final BiFunction<? super K, ? super K, ? extends K> reducer;
4836              if ((reducer = this.reducer) != null) {
4837 <                for (int b; (b = preSplit()) > 0;)
4837 >                for (int i = baseIndex, f, h; batch > 0 &&
4838 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4839 >                    addToPendingCount(1);
4840                      (rights = new ReduceKeysTask<K,V>
4841 <                     (map, this, b, rights, reducer)).fork();
4842 <                K u, r = null;
4843 <                while ((u = advanceKey()) != null) {
4841 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
4842 >                      rights, reducer)).fork();
4843 >                }
4844 >                K r = null;
4845 >                for (Node<K,V> p; (p = advance()) != null; ) {
4846 >                    K u = (K)p.key;
4847                      r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
4848                  }
4849                  result = r;
# Line 6140 | Line 4864 | public class ConcurrentHashMap<K,V>
4864          }
4865      }
4866  
4867 <    @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
4868 <        extends Traverser<K,V,V> {
4867 >    static final class ReduceValuesTask<K,V>
4868 >        extends BulkTask<K,V,V> {
4869          final BiFunction<? super V, ? super V, ? extends V> reducer;
4870          V result;
4871          ReduceValuesTask<K,V> rights, nextRight;
4872          ReduceValuesTask
4873 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4873 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4874               ReduceValuesTask<K,V> nextRight,
4875               BiFunction<? super V, ? super V, ? extends V> reducer) {
4876 <            super(m, p, b); this.nextRight = nextRight;
4876 >            super(p, b, i, f, t); this.nextRight = nextRight;
4877              this.reducer = reducer;
4878          }
4879          public final V getRawResult() { return result; }
4880 <        @SuppressWarnings("unchecked") public final void compute() {
4880 >        public final void compute() {
4881              final BiFunction<? super V, ? super V, ? extends V> reducer;
4882              if ((reducer = this.reducer) != null) {
4883 <                for (int b; (b = preSplit()) > 0;)
4883 >                for (int i = baseIndex, f, h; batch > 0 &&
4884 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4885 >                    addToPendingCount(1);
4886                      (rights = new ReduceValuesTask<K,V>
4887 <                     (map, this, b, rights, reducer)).fork();
4888 <                V r = null, v;
4889 <                while ((v = advanceValue()) != null)
4887 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
4888 >                      rights, reducer)).fork();
4889 >                }
4890 >                V r = null;
4891 >                for (Node<K,V> p; (p = advance()) != null; ) {
4892 >                    V v = p.val;
4893                      r = (r == null) ? v : reducer.apply(r, v);
4894 +                }
4895                  result = r;
4896                  CountedCompleter<?> c;
4897                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6180 | Line 4910 | public class ConcurrentHashMap<K,V>
4910          }
4911      }
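When completed subtasks are combined in the firstComplete()/nextComplete() loop above, a subtask that saw no elements reports a null partial result, so merging has to skip nulls instead of passing them to the reducer. A minimal sketch of that merge rule, with a hypothetical class name:

import java.util.function.BiFunction;

class ReduceMergeSketch {
    // Nulls mean "no elements seen in that subrange" and are never passed on.
    static <K> K merge(K left, K right,
                       BiFunction<? super K, ? super K, ? extends K> reducer) {
        return (left == null) ? right
             : (right == null) ? left
             : reducer.apply(left, right);
    }
    public static void main(String[] args) {
        BiFunction<String,String,String> min = (a, b) -> a.compareTo(b) <= 0 ? a : b;
        System.out.println(merge("pear", null, min));      // pear
        System.out.println(merge("pear", "apple", min));   // apple
    }
}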
4912  
4913 <    @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
4914 <        extends Traverser<K,V,Map.Entry<K,V>> {
4913 >    static final class ReduceEntriesTask<K,V>
4914 >        extends BulkTask<K,V,Map.Entry<K,V>> {
4915          final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
4916          Map.Entry<K,V> result;
4917          ReduceEntriesTask<K,V> rights, nextRight;
4918          ReduceEntriesTask
4919 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4919 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4920               ReduceEntriesTask<K,V> nextRight,
4921               BiFunction<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4922 <            super(m, p, b); this.nextRight = nextRight;
4922 >            super(p, b, i, f, t); this.nextRight = nextRight;
4923              this.reducer = reducer;
4924          }
4925          public final Map.Entry<K,V> getRawResult() { return result; }
4926 <        @SuppressWarnings("unchecked") public final void compute() {
4926 >        public final void compute() {
4927              final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
4928              if ((reducer = this.reducer) != null) {
4929 <                for (int b; (b = preSplit()) > 0;)
4929 >                for (int i = baseIndex, f, h; batch > 0 &&
4930 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4931 >                    addToPendingCount(1);
4932                      (rights = new ReduceEntriesTask<K,V>
4933 <                     (map, this, b, rights, reducer)).fork();
4934 <                Map.Entry<K,V> r = null;
6203 <                V v;
6204 <                while ((v = advanceValue()) != null) {
6205 <                    Map.Entry<K,V> u = entryFor(nextKey, v);
6206 <                    r = (r == null) ? u : reducer.apply(r, u);
4933 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
4934 >                      rights, reducer)).fork();
4935                  }
4936 +                Map.Entry<K,V> r = null;
4937 +                for (Node<K,V> p; (p = advance()) != null; )
4938 +                    r = (r == null) ? p : reducer.apply(r, p);
4939                  result = r;
4940                  CountedCompleter<?> c;
4941                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6223 | Line 4954 | public class ConcurrentHashMap<K,V>
4954          }
4955      }
4956  
4957 <    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
4958 <        extends Traverser<K,V,U> {
4957 >    static final class MapReduceKeysTask<K,V,U>
4958 >        extends BulkTask<K,V,U> {
4959          final Function<? super K, ? extends U> transformer;
4960          final BiFunction<? super U, ? super U, ? extends U> reducer;
4961          U result;
4962          MapReduceKeysTask<K,V,U> rights, nextRight;
4963          MapReduceKeysTask
4964 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
4964 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
4965               MapReduceKeysTask<K,V,U> nextRight,
4966               Function<? super K, ? extends U> transformer,
4967               BiFunction<? super U, ? super U, ? extends U> reducer) {
4968 <            super(m, p, b); this.nextRight = nextRight;
4968 >            super(p, b, i, f, t); this.nextRight = nextRight;
4969              this.transformer = transformer;
4970              this.reducer = reducer;
4971          }
4972          public final U getRawResult() { return result; }
4973 <        @SuppressWarnings("unchecked") public final void compute() {
4973 >        public final void compute() {
4974              final Function<? super K, ? extends U> transformer;
4975              final BiFunction<? super U, ? super U, ? extends U> reducer;
4976              if ((transformer = this.transformer) != null &&
4977                  (reducer = this.reducer) != null) {
4978 <                for (int b; (b = preSplit()) > 0;)
4978 >                for (int i = baseIndex, f, h; batch > 0 &&
4979 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
4980 >                    addToPendingCount(1);
4981                      (rights = new MapReduceKeysTask<K,V,U>
4982 <                     (map, this, b, rights, transformer, reducer)).fork();
4983 <                K k; U r = null, u;
4984 <                while ((k = advanceKey()) != null) {
4985 <                    if ((u = transformer.apply(k)) != null)
4982 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
4983 >                      rights, transformer, reducer)).fork();
4984 >                }
4985 >                U r = null;
4986 >                for (Node<K,V> p; (p = advance()) != null; ) {
4987 >                    U u;
4988 >                    if ((u = transformer.apply((K)p.key)) != null)
4989                          r = (r == null) ? u : reducer.apply(r, u);
4990                  }
4991                  result = r;
# Line 6270 | Line 5006 | public class ConcurrentHashMap<K,V>
5006          }
5007      }
5008  
5009 <    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
5010 <        extends Traverser<K,V,U> {
5009 >    static final class MapReduceValuesTask<K,V,U>
5010 >        extends BulkTask<K,V,U> {
5011          final Function<? super V, ? extends U> transformer;
5012          final BiFunction<? super U, ? super U, ? extends U> reducer;
5013          U result;
5014          MapReduceValuesTask<K,V,U> rights, nextRight;
5015          MapReduceValuesTask
5016 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5016 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5017               MapReduceValuesTask<K,V,U> nextRight,
5018               Function<? super V, ? extends U> transformer,
5019               BiFunction<? super U, ? super U, ? extends U> reducer) {
5020 <            super(m, p, b); this.nextRight = nextRight;
5020 >            super(p, b, i, f, t); this.nextRight = nextRight;
5021              this.transformer = transformer;
5022              this.reducer = reducer;
5023          }
5024          public final U getRawResult() { return result; }
5025 <        @SuppressWarnings("unchecked") public final void compute() {
5025 >        public final void compute() {
5026              final Function<? super V, ? extends U> transformer;
5027              final BiFunction<? super U, ? super U, ? extends U> reducer;
5028              if ((transformer = this.transformer) != null &&
5029                  (reducer = this.reducer) != null) {
5030 <                for (int b; (b = preSplit()) > 0;)
5030 >                for (int i = baseIndex, f, h; batch > 0 &&
5031 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5032 >                    addToPendingCount(1);
5033                      (rights = new MapReduceValuesTask<K,V,U>
5034 <                     (map, this, b, rights, transformer, reducer)).fork();
5035 <                U r = null, u;
5036 <                V v;
5037 <                while ((v = advanceValue()) != null) {
5038 <                    if ((u = transformer.apply(v)) != null)
5034 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5035 >                      rights, transformer, reducer)).fork();
5036 >                }
5037 >                U r = null;
5038 >                for (Node<K,V> p; (p = advance()) != null; ) {
5039 >                    U u;
5040 >                    if ((u = transformer.apply(p.val)) != null)
5041                          r = (r == null) ? u : reducer.apply(r, u);
5042                  }
5043                  result = r;
# Line 6318 | Line 5058 | public class ConcurrentHashMap<K,V>
5058          }
5059      }
5060  
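Note also that the MapReduce* tasks above and below skip any element whose transformer returns null, so a transformer can double as a filter during reduction. A hypothetical usage sketch follows (the reduceValues signature shown is the final JDK 8 form with a leading parallelismThreshold argument, which may be spelled slightly differently in this intermediate revision):

import java.util.concurrent.ConcurrentHashMap;

// Hypothetical example, not part of the patch: a transformer that returns
// null contributes nothing to the reduction, so it also acts as a filter.
public class NullSkipDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<Integer, String> names = new ConcurrentHashMap<>();
        names.put(1, "ok");
        names.put(2, "this one is rather long");

        // Keep only "long" values; reduce by retaining whichever survivor
        // happens to be combined first (order is unspecified).
        String anyLong = names.reduceValues(1L,
            v -> v.length() > 10 ? v : null,
            (a, b) -> a);
        System.out.println(anyLong);
    }
}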
5061 <    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
5062 <        extends Traverser<K,V,U> {
5061 >    static final class MapReduceEntriesTask<K,V,U>
5062 >        extends BulkTask<K,V,U> {
5063          final Function<Map.Entry<K,V>, ? extends U> transformer;
5064          final BiFunction<? super U, ? super U, ? extends U> reducer;
5065          U result;
5066          MapReduceEntriesTask<K,V,U> rights, nextRight;
5067          MapReduceEntriesTask
5068 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5068 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5069               MapReduceEntriesTask<K,V,U> nextRight,
5070               Function<Map.Entry<K,V>, ? extends U> transformer,
5071               BiFunction<? super U, ? super U, ? extends U> reducer) {
5072 <            super(m, p, b); this.nextRight = nextRight;
5072 >            super(p, b, i, f, t); this.nextRight = nextRight;
5073              this.transformer = transformer;
5074              this.reducer = reducer;
5075          }
5076          public final U getRawResult() { return result; }
5077 <        @SuppressWarnings("unchecked") public final void compute() {
5077 >        public final void compute() {
5078              final Function<Map.Entry<K,V>, ? extends U> transformer;
5079              final BiFunction<? super U, ? super U, ? extends U> reducer;
5080              if ((transformer = this.transformer) != null &&
5081                  (reducer = this.reducer) != null) {
5082 <                for (int b; (b = preSplit()) > 0;)
5082 >                for (int i = baseIndex, f, h; batch > 0 &&
5083 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5084 >                    addToPendingCount(1);
5085                      (rights = new MapReduceEntriesTask<K,V,U>
5086 <                     (map, this, b, rights, transformer, reducer)).fork();
5087 <                U r = null, u;
5088 <                V v;
5089 <                while ((v = advanceValue()) != null) {
5090 <                    if ((u = transformer.apply(entryFor(nextKey,
5091 <                                                        v))) != null)
5086 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5087 >                      rights, transformer, reducer)).fork();
5088 >                }
5089 >                U r = null;
5090 >                for (Node<K,V> p; (p = advance()) != null; ) {
5091 >                    U u;
5092 >                    if ((u = transformer.apply(p)) != null)
5093                          r = (r == null) ? u : reducer.apply(r, u);
5094                  }
5095                  result = r;
# Line 6367 | Line 5110 | public class ConcurrentHashMap<K,V>
5110          }
5111      }
5112  
5113 <    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
5114 <        extends Traverser<K,V,U> {
5113 >    static final class MapReduceMappingsTask<K,V,U>
5114 >        extends BulkTask<K,V,U> {
5115          final BiFunction<? super K, ? super V, ? extends U> transformer;
5116          final BiFunction<? super U, ? super U, ? extends U> reducer;
5117          U result;
5118          MapReduceMappingsTask<K,V,U> rights, nextRight;
5119          MapReduceMappingsTask
5120 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5120 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5121               MapReduceMappingsTask<K,V,U> nextRight,
5122               BiFunction<? super K, ? super V, ? extends U> transformer,
5123               BiFunction<? super U, ? super U, ? extends U> reducer) {
5124 <            super(m, p, b); this.nextRight = nextRight;
5124 >            super(p, b, i, f, t); this.nextRight = nextRight;
5125              this.transformer = transformer;
5126              this.reducer = reducer;
5127          }
5128          public final U getRawResult() { return result; }
5129 <        @SuppressWarnings("unchecked") public final void compute() {
5129 >        public final void compute() {
5130              final BiFunction<? super K, ? super V, ? extends U> transformer;
5131              final BiFunction<? super U, ? super U, ? extends U> reducer;
5132              if ((transformer = this.transformer) != null &&
5133                  (reducer = this.reducer) != null) {
5134 <                for (int b; (b = preSplit()) > 0;)
5134 >                for (int i = baseIndex, f, h; batch > 0 &&
5135 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5136 >                    addToPendingCount(1);
5137                      (rights = new MapReduceMappingsTask<K,V,U>
5138 <                     (map, this, b, rights, transformer, reducer)).fork();
5139 <                U r = null, u;
5140 <                V v;
5141 <                while ((v = advanceValue()) != null) {
5142 <                    if ((u = transformer.apply(nextKey, v)) != null)
5138 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5139 >                      rights, transformer, reducer)).fork();
5140 >                }
5141 >                U r = null;
5142 >                for (Node<K,V> p; (p = advance()) != null; ) {
5143 >                    U u;
5144 >                    if ((u = transformer.apply((K)p.key, p.val)) != null)
5145                          r = (r == null) ? u : reducer.apply(r, u);
5146                  }
5147                  result = r;
# Line 6415 | Line 5162 | public class ConcurrentHashMap<K,V>
5162          }
5163      }
5164  
5165 <    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
5166 <        extends Traverser<K,V,Double> {
5165 >    static final class MapReduceKeysToDoubleTask<K,V>
5166 >        extends BulkTask<K,V,Double> {
5167          final ToDoubleFunction<? super K> transformer;
5168          final DoubleBinaryOperator reducer;
5169          final double basis;
5170          double result;
5171          MapReduceKeysToDoubleTask<K,V> rights, nextRight;
5172          MapReduceKeysToDoubleTask
5173 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5173 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5174               MapReduceKeysToDoubleTask<K,V> nextRight,
5175               ToDoubleFunction<? super K> transformer,
5176               double basis,
5177               DoubleBinaryOperator reducer) {
5178 <            super(m, p, b); this.nextRight = nextRight;
5178 >            super(p, b, i, f, t); this.nextRight = nextRight;
5179              this.transformer = transformer;
5180              this.basis = basis; this.reducer = reducer;
5181          }
5182          public final Double getRawResult() { return result; }
5183 <        @SuppressWarnings("unchecked") public final void compute() {
5183 >        public final void compute() {
5184              final ToDoubleFunction<? super K> transformer;
5185              final DoubleBinaryOperator reducer;
5186              if ((transformer = this.transformer) != null &&
5187                  (reducer = this.reducer) != null) {
5188                  double r = this.basis;
5189 <                for (int b; (b = preSplit()) > 0;)
5189 >                for (int i = baseIndex, f, h; batch > 0 &&
5190 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5191 >                    addToPendingCount(1);
5192                      (rights = new MapReduceKeysToDoubleTask<K,V>
5193 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5194 <                K k;
5195 <                while ((k = advanceKey()) != null)
5196 <                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(k));
5193 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5194 >                      rights, transformer, r, reducer)).fork();
5195 >                }
5196 >                for (Node<K,V> p; (p = advance()) != null; )
5197 >                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)p.key));
5198                  result = r;
5199                  CountedCompleter<?> c;
5200                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6460 | Line 5210 | public class ConcurrentHashMap<K,V>
5210          }
5211      }
5212  
5213 <    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
5214 <        extends Traverser<K,V,Double> {
5213 >    static final class MapReduceValuesToDoubleTask<K,V>
5214 >        extends BulkTask<K,V,Double> {
5215          final ToDoubleFunction<? super V> transformer;
5216          final DoubleBinaryOperator reducer;
5217          final double basis;
5218          double result;
5219          MapReduceValuesToDoubleTask<K,V> rights, nextRight;
5220          MapReduceValuesToDoubleTask
5221 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5221 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5222               MapReduceValuesToDoubleTask<K,V> nextRight,
5223               ToDoubleFunction<? super V> transformer,
5224               double basis,
5225               DoubleBinaryOperator reducer) {
5226 <            super(m, p, b); this.nextRight = nextRight;
5226 >            super(p, b, i, f, t); this.nextRight = nextRight;
5227              this.transformer = transformer;
5228              this.basis = basis; this.reducer = reducer;
5229          }
5230          public final Double getRawResult() { return result; }
5231 <        @SuppressWarnings("unchecked") public final void compute() {
5231 >        public final void compute() {
5232              final ToDoubleFunction<? super V> transformer;
5233              final DoubleBinaryOperator reducer;
5234              if ((transformer = this.transformer) != null &&
5235                  (reducer = this.reducer) != null) {
5236                  double r = this.basis;
5237 <                for (int b; (b = preSplit()) > 0;)
5237 >                for (int i = baseIndex, f, h; batch > 0 &&
5238 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5239 >                    addToPendingCount(1);
5240                      (rights = new MapReduceValuesToDoubleTask<K,V>
5241 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5242 <                V v;
5243 <                while ((v = advanceValue()) != null)
5244 <                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
5241 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5242 >                      rights, transformer, r, reducer)).fork();
5243 >                }
5244 >                for (Node<K,V> p; (p = advance()) != null; )
5245 >                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(p.val));
5246                  result = r;
5247                  CountedCompleter<?> c;
5248                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6505 | Line 5258 | public class ConcurrentHashMap<K,V>
5258          }
5259      }
5260  
5261 <    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
5262 <        extends Traverser<K,V,Double> {
5261 >    static final class MapReduceEntriesToDoubleTask<K,V>
5262 >        extends BulkTask<K,V,Double> {
5263          final ToDoubleFunction<Map.Entry<K,V>> transformer;
5264          final DoubleBinaryOperator reducer;
5265          final double basis;
5266          double result;
5267          MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
5268          MapReduceEntriesToDoubleTask
5269 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5269 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5270               MapReduceEntriesToDoubleTask<K,V> nextRight,
5271               ToDoubleFunction<Map.Entry<K,V>> transformer,
5272               double basis,
5273               DoubleBinaryOperator reducer) {
5274 <            super(m, p, b); this.nextRight = nextRight;
5274 >            super(p, b, i, f, t); this.nextRight = nextRight;
5275              this.transformer = transformer;
5276              this.basis = basis; this.reducer = reducer;
5277          }
5278          public final Double getRawResult() { return result; }
5279 <        @SuppressWarnings("unchecked") public final void compute() {
5279 >        public final void compute() {
5280              final ToDoubleFunction<Map.Entry<K,V>> transformer;
5281              final DoubleBinaryOperator reducer;
5282              if ((transformer = this.transformer) != null &&
5283                  (reducer = this.reducer) != null) {
5284                  double r = this.basis;
5285 <                for (int b; (b = preSplit()) > 0;)
5285 >                for (int i = baseIndex, f, h; batch > 0 &&
5286 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5287 >                    addToPendingCount(1);
5288                      (rights = new MapReduceEntriesToDoubleTask<K,V>
5289 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5290 <                V v;
5291 <                while ((v = advanceValue()) != null)
5292 <                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor(nextKey,
5293 <                                                                    v)));
5289 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5290 >                      rights, transformer, r, reducer)).fork();
5291 >                }
5292 >                for (Node<K,V> p; (p = advance()) != null; )
5293 >                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(p));
5294                  result = r;
5295                  CountedCompleter<?> c;
5296                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6551 | Line 5306 | public class ConcurrentHashMap<K,V>
5306          }
5307      }
5308  
5309 <    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
5310 <        extends Traverser<K,V,Double> {
5309 >    static final class MapReduceMappingsToDoubleTask<K,V>
5310 >        extends BulkTask<K,V,Double> {
5311          final ToDoubleBiFunction<? super K, ? super V> transformer;
5312          final DoubleBinaryOperator reducer;
5313          final double basis;
5314          double result;
5315          MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
5316          MapReduceMappingsToDoubleTask
5317 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5317 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5318               MapReduceMappingsToDoubleTask<K,V> nextRight,
5319               ToDoubleBiFunction<? super K, ? super V> transformer,
5320               double basis,
5321               DoubleBinaryOperator reducer) {
5322 <            super(m, p, b); this.nextRight = nextRight;
5322 >            super(p, b, i, f, t); this.nextRight = nextRight;
5323              this.transformer = transformer;
5324              this.basis = basis; this.reducer = reducer;
5325          }
5326          public final Double getRawResult() { return result; }
5327 <        @SuppressWarnings("unchecked") public final void compute() {
5327 >        public final void compute() {
5328              final ToDoubleBiFunction<? super K, ? super V> transformer;
5329              final DoubleBinaryOperator reducer;
5330              if ((transformer = this.transformer) != null &&
5331                  (reducer = this.reducer) != null) {
5332                  double r = this.basis;
5333 <                for (int b; (b = preSplit()) > 0;)
5333 >                for (int i = baseIndex, f, h; batch > 0 &&
5334 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5335 >                    addToPendingCount(1);
5336                      (rights = new MapReduceMappingsToDoubleTask<K,V>
5337 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5338 <                V v;
5339 <                while ((v = advanceValue()) != null)
5340 <                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(nextKey, v));
5337 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5338 >                      rights, transformer, r, reducer)).fork();
5339 >                }
5340 >                for (Node<K,V> p; (p = advance()) != null; )
5341 >                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)p.key, p.val));
5342                  result = r;
5343                  CountedCompleter<?> c;
5344                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6596 | Line 5354 | public class ConcurrentHashMap<K,V>
5354          }
5355      }
5356  
5357 <    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
5358 <        extends Traverser<K,V,Long> {
5357 >    static final class MapReduceKeysToLongTask<K,V>
5358 >        extends BulkTask<K,V,Long> {
5359          final ToLongFunction<? super K> transformer;
5360          final LongBinaryOperator reducer;
5361          final long basis;
5362          long result;
5363          MapReduceKeysToLongTask<K,V> rights, nextRight;
5364          MapReduceKeysToLongTask
5365 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5365 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5366               MapReduceKeysToLongTask<K,V> nextRight,
5367               ToLongFunction<? super K> transformer,
5368               long basis,
5369               LongBinaryOperator reducer) {
5370 <            super(m, p, b); this.nextRight = nextRight;
5370 >            super(p, b, i, f, t); this.nextRight = nextRight;
5371              this.transformer = transformer;
5372              this.basis = basis; this.reducer = reducer;
5373          }
5374          public final Long getRawResult() { return result; }
5375 <        @SuppressWarnings("unchecked") public final void compute() {
5375 >        public final void compute() {
5376              final ToLongFunction<? super K> transformer;
5377              final LongBinaryOperator reducer;
5378              if ((transformer = this.transformer) != null &&
5379                  (reducer = this.reducer) != null) {
5380                  long r = this.basis;
5381 <                for (int b; (b = preSplit()) > 0;)
5381 >                for (int i = baseIndex, f, h; batch > 0 &&
5382 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5383 >                    addToPendingCount(1);
5384                      (rights = new MapReduceKeysToLongTask<K,V>
5385 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5386 <                K k;
5387 <                while ((k = advanceKey()) != null)
5388 <                    r = reducer.applyAsLong(r, transformer.applyAsLong(k));
5385 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5386 >                      rights, transformer, r, reducer)).fork();
5387 >                }
5388 >                for (Node<K,V> p; (p = advance()) != null; )
5389 >                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)p.key));
5390                  result = r;
5391                  CountedCompleter<?> c;
5392                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6641 | Line 5402 | public class ConcurrentHashMap<K,V>
5402          }
5403      }
5404  
5405 <    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
5406 <        extends Traverser<K,V,Long> {
5405 >    static final class MapReduceValuesToLongTask<K,V>
5406 >        extends BulkTask<K,V,Long> {
5407          final ToLongFunction<? super V> transformer;
5408          final LongBinaryOperator reducer;
5409          final long basis;
5410          long result;
5411          MapReduceValuesToLongTask<K,V> rights, nextRight;
5412          MapReduceValuesToLongTask
5413 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5413 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5414               MapReduceValuesToLongTask<K,V> nextRight,
5415               ToLongFunction<? super V> transformer,
5416               long basis,
5417               LongBinaryOperator reducer) {
5418 <            super(m, p, b); this.nextRight = nextRight;
5418 >            super(p, b, i, f, t); this.nextRight = nextRight;
5419              this.transformer = transformer;
5420              this.basis = basis; this.reducer = reducer;
5421          }
5422          public final Long getRawResult() { return result; }
5423 <        @SuppressWarnings("unchecked") public final void compute() {
5423 >        public final void compute() {
5424              final ToLongFunction<? super V> transformer;
5425              final LongBinaryOperator reducer;
5426              if ((transformer = this.transformer) != null &&
5427                  (reducer = this.reducer) != null) {
5428                  long r = this.basis;
5429 <                for (int b; (b = preSplit()) > 0;)
5429 >                for (int i = baseIndex, f, h; batch > 0 &&
5430 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5431 >                    addToPendingCount(1);
5432                      (rights = new MapReduceValuesToLongTask<K,V>
5433 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5434 <                V v;
5435 <                while ((v = advanceValue()) != null)
5436 <                    r = reducer.applyAsLong(r, transformer.applyAsLong(v));
5433 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5434 >                      rights, transformer, r, reducer)).fork();
5435 >                }
5436 >                for (Node<K,V> p; (p = advance()) != null; )
5437 >                    r = reducer.applyAsLong(r, transformer.applyAsLong(p.val));
5438                  result = r;
5439                  CountedCompleter<?> c;
5440                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6686 | Line 5450 | public class ConcurrentHashMap<K,V>
5450          }
5451      }
5452  
5453 <    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
5454 <        extends Traverser<K,V,Long> {
5453 >    static final class MapReduceEntriesToLongTask<K,V>
5454 >        extends BulkTask<K,V,Long> {
5455          final ToLongFunction<Map.Entry<K,V>> transformer;
5456          final LongBinaryOperator reducer;
5457          final long basis;
5458          long result;
5459          MapReduceEntriesToLongTask<K,V> rights, nextRight;
5460          MapReduceEntriesToLongTask
5461 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5461 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5462               MapReduceEntriesToLongTask<K,V> nextRight,
5463               ToLongFunction<Map.Entry<K,V>> transformer,
5464               long basis,
5465               LongBinaryOperator reducer) {
5466 <            super(m, p, b); this.nextRight = nextRight;
5466 >            super(p, b, i, f, t); this.nextRight = nextRight;
5467              this.transformer = transformer;
5468              this.basis = basis; this.reducer = reducer;
5469          }
5470          public final Long getRawResult() { return result; }
5471 <        @SuppressWarnings("unchecked") public final void compute() {
5471 >        public final void compute() {
5472              final ToLongFunction<Map.Entry<K,V>> transformer;
5473              final LongBinaryOperator reducer;
5474              if ((transformer = this.transformer) != null &&
5475                  (reducer = this.reducer) != null) {
5476                  long r = this.basis;
5477 <                for (int b; (b = preSplit()) > 0;)
5477 >                for (int i = baseIndex, f, h; batch > 0 &&
5478 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5479 >                    addToPendingCount(1);
5480                      (rights = new MapReduceEntriesToLongTask<K,V>
5481 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5482 <                V v;
5483 <                while ((v = advanceValue()) != null)
5484 <                    r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor(nextKey, v)));
5481 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5482 >                      rights, transformer, r, reducer)).fork();
5483 >                }
5484 >                for (Node<K,V> p; (p = advance()) != null; )
5485 >                    r = reducer.applyAsLong(r, transformer.applyAsLong(p));
5486                  result = r;
5487                  CountedCompleter<?> c;
5488                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6731 | Line 5498 | public class ConcurrentHashMap<K,V>
5498          }
5499      }
5500  
5501 <    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
5502 <        extends Traverser<K,V,Long> {
5501 >    static final class MapReduceMappingsToLongTask<K,V>
5502 >        extends BulkTask<K,V,Long> {
5503          final ToLongBiFunction<? super K, ? super V> transformer;
5504          final LongBinaryOperator reducer;
5505          final long basis;
5506          long result;
5507          MapReduceMappingsToLongTask<K,V> rights, nextRight;
5508          MapReduceMappingsToLongTask
5509 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5509 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5510               MapReduceMappingsToLongTask<K,V> nextRight,
5511               ToLongBiFunction<? super K, ? super V> transformer,
5512               long basis,
5513               LongBinaryOperator reducer) {
5514 <            super(m, p, b); this.nextRight = nextRight;
5514 >            super(p, b, i, f, t); this.nextRight = nextRight;
5515              this.transformer = transformer;
5516              this.basis = basis; this.reducer = reducer;
5517          }
5518          public final Long getRawResult() { return result; }
5519 <        @SuppressWarnings("unchecked") public final void compute() {
5519 >        public final void compute() {
5520              final ToLongBiFunction<? super K, ? super V> transformer;
5521              final LongBinaryOperator reducer;
5522              if ((transformer = this.transformer) != null &&
5523                  (reducer = this.reducer) != null) {
5524                  long r = this.basis;
5525 <                for (int b; (b = preSplit()) > 0;)
5525 >                for (int i = baseIndex, f, h; batch > 0 &&
5526 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5527 >                    addToPendingCount(1);
5528                      (rights = new MapReduceMappingsToLongTask<K,V>
5529 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5530 <                V v;
5531 <                while ((v = advanceValue()) != null)
5532 <                    r = reducer.applyAsLong(r, transformer.applyAsLong(nextKey, v));
5529 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5530 >                      rights, transformer, r, reducer)).fork();
5531 >                }
5532 >                for (Node<K,V> p; (p = advance()) != null; )
5533 >                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)p.key, p.val));
5534                  result = r;
5535                  CountedCompleter<?> c;
5536                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6776 | Line 5546 | public class ConcurrentHashMap<K,V>
5546          }
5547      }
5548  
5549 <    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
5550 <        extends Traverser<K,V,Integer> {
5549 >    static final class MapReduceKeysToIntTask<K,V>
5550 >        extends BulkTask<K,V,Integer> {
5551          final ToIntFunction<? super K> transformer;
5552          final IntBinaryOperator reducer;
5553          final int basis;
5554          int result;
5555          MapReduceKeysToIntTask<K,V> rights, nextRight;
5556          MapReduceKeysToIntTask
5557 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5557 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5558               MapReduceKeysToIntTask<K,V> nextRight,
5559               ToIntFunction<? super K> transformer,
5560               int basis,
5561               IntBinaryOperator reducer) {
5562 <            super(m, p, b); this.nextRight = nextRight;
5562 >            super(p, b, i, f, t); this.nextRight = nextRight;
5563              this.transformer = transformer;
5564              this.basis = basis; this.reducer = reducer;
5565          }
5566          public final Integer getRawResult() { return result; }
5567 <        @SuppressWarnings("unchecked") public final void compute() {
5567 >        public final void compute() {
5568              final ToIntFunction<? super K> transformer;
5569              final IntBinaryOperator reducer;
5570              if ((transformer = this.transformer) != null &&
5571                  (reducer = this.reducer) != null) {
5572                  int r = this.basis;
5573 <                for (int b; (b = preSplit()) > 0;)
5573 >                for (int i = baseIndex, f, h; batch > 0 &&
5574 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5575 >                    addToPendingCount(1);
5576                      (rights = new MapReduceKeysToIntTask<K,V>
5577 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5578 <                K k;
5579 <                while ((k = advanceKey()) != null)
5580 <                    r = reducer.applyAsInt(r, transformer.applyAsInt(k));
5577 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5578 >                      rights, transformer, r, reducer)).fork();
5579 >                }
5580 >                for (Node<K,V> p; (p = advance()) != null; )
5581 >                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)p.key));
5582                  result = r;
5583                  CountedCompleter<?> c;
5584                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6821 | Line 5594 | public class ConcurrentHashMap<K,V>
5594          }
5595      }
5596  
5597 <    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
5598 <        extends Traverser<K,V,Integer> {
5597 >    static final class MapReduceValuesToIntTask<K,V>
5598 >        extends BulkTask<K,V,Integer> {
5599          final ToIntFunction<? super V> transformer;
5600          final IntBinaryOperator reducer;
5601          final int basis;
5602          int result;
5603          MapReduceValuesToIntTask<K,V> rights, nextRight;
5604          MapReduceValuesToIntTask
5605 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5605 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5606               MapReduceValuesToIntTask<K,V> nextRight,
5607               ToIntFunction<? super V> transformer,
5608               int basis,
5609               IntBinaryOperator reducer) {
5610 <            super(m, p, b); this.nextRight = nextRight;
5610 >            super(p, b, i, f, t); this.nextRight = nextRight;
5611              this.transformer = transformer;
5612              this.basis = basis; this.reducer = reducer;
5613          }
5614          public final Integer getRawResult() { return result; }
5615 <        @SuppressWarnings("unchecked") public final void compute() {
5615 >        public final void compute() {
5616              final ToIntFunction<? super V> transformer;
5617              final IntBinaryOperator reducer;
5618              if ((transformer = this.transformer) != null &&
5619                  (reducer = this.reducer) != null) {
5620                  int r = this.basis;
5621 <                for (int b; (b = preSplit()) > 0;)
5621 >                for (int i = baseIndex, f, h; batch > 0 &&
5622 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5623 >                    addToPendingCount(1);
5624                      (rights = new MapReduceValuesToIntTask<K,V>
5625 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5626 <                V v;
5627 <                while ((v = advanceValue()) != null)
5628 <                    r = reducer.applyAsInt(r, transformer.applyAsInt(v));
5625 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5626 >                      rights, transformer, r, reducer)).fork();
5627 >                }
5628 >                for (Node<K,V> p; (p = advance()) != null; )
5629 >                    r = reducer.applyAsInt(r, transformer.applyAsInt(p.val));
5630                  result = r;
5631                  CountedCompleter<?> c;
5632                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6866 | Line 5642 | public class ConcurrentHashMap<K,V>
5642          }
5643      }
5644  
5645 <    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
5646 <        extends Traverser<K,V,Integer> {
5645 >    static final class MapReduceEntriesToIntTask<K,V>
5646 >        extends BulkTask<K,V,Integer> {
5647          final ToIntFunction<Map.Entry<K,V>> transformer;
5648          final IntBinaryOperator reducer;
5649          final int basis;
5650          int result;
5651          MapReduceEntriesToIntTask<K,V> rights, nextRight;
5652          MapReduceEntriesToIntTask
5653 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5653 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5654               MapReduceEntriesToIntTask<K,V> nextRight,
5655               ToIntFunction<Map.Entry<K,V>> transformer,
5656               int basis,
5657               IntBinaryOperator reducer) {
5658 <            super(m, p, b); this.nextRight = nextRight;
5658 >            super(p, b, i, f, t); this.nextRight = nextRight;
5659              this.transformer = transformer;
5660              this.basis = basis; this.reducer = reducer;
5661          }
5662          public final Integer getRawResult() { return result; }
5663 <        @SuppressWarnings("unchecked") public final void compute() {
5663 >        public final void compute() {
5664              final ToIntFunction<Map.Entry<K,V>> transformer;
5665              final IntBinaryOperator reducer;
5666              if ((transformer = this.transformer) != null &&
5667                  (reducer = this.reducer) != null) {
5668                  int r = this.basis;
5669 <                for (int b; (b = preSplit()) > 0;)
5669 >                for (int i = baseIndex, f, h; batch > 0 &&
5670 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5671 >                    addToPendingCount(1);
5672                      (rights = new MapReduceEntriesToIntTask<K,V>
5673 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5674 <                V v;
5675 <                while ((v = advanceValue()) != null)
5676 <                    r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor(nextKey,
5677 <                                                                    v)));
5673 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5674 >                      rights, transformer, r, reducer)).fork();
5675 >                }
5676 >                for (Node<K,V> p; (p = advance()) != null; )
5677 >                    r = reducer.applyAsInt(r, transformer.applyAsInt(p));
5678                  result = r;
5679                  CountedCompleter<?> c;
5680                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6912 | Line 5690 | public class ConcurrentHashMap<K,V>
5690          }
5691      }
5692  
5693 <    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
5694 <        extends Traverser<K,V,Integer> {
5693 >    static final class MapReduceMappingsToIntTask<K,V>
5694 >        extends BulkTask<K,V,Integer> {
5695          final ToIntBiFunction<? super K, ? super V> transformer;
5696          final IntBinaryOperator reducer;
5697          final int basis;
5698          int result;
5699          MapReduceMappingsToIntTask<K,V> rights, nextRight;
5700          MapReduceMappingsToIntTask
5701 <            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5701 >            (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
5702               MapReduceMappingsToIntTask<K,V> nextRight,
5703               ToIntBiFunction<? super K, ? super V> transformer,
5704               int basis,
5705               IntBinaryOperator reducer) {
5706 <            super(m, p, b); this.nextRight = nextRight;
5706 >            super(p, b, i, f, t); this.nextRight = nextRight;
5707              this.transformer = transformer;
5708              this.basis = basis; this.reducer = reducer;
5709          }
5710          public final Integer getRawResult() { return result; }
5711 <        @SuppressWarnings("unchecked") public final void compute() {
5711 >        public final void compute() {
5712              final ToIntBiFunction<? super K, ? super V> transformer;
5713              final IntBinaryOperator reducer;
5714              if ((transformer = this.transformer) != null &&
5715                  (reducer = this.reducer) != null) {
5716                  int r = this.basis;
5717 <                for (int b; (b = preSplit()) > 0;)
5717 >                for (int i = baseIndex, f, h; batch > 0 &&
5718 >                         (h = ((f = baseLimit) + i) >>> 1) > i;) {
5719 >                    addToPendingCount(1);
5720                      (rights = new MapReduceMappingsToIntTask<K,V>
5721 <                     (map, this, b, rights, transformer, r, reducer)).fork();
5722 <                V v;
5723 <                while ((v = advanceValue()) != null)
5724 <                    r = reducer.applyAsInt(r, transformer.applyAsInt(nextKey, v));
5721 >                     (this, batch >>>= 1, baseLimit = h, f, tab,
5722 >                      rights, transformer, r, reducer)).fork();
5723 >                }
5724 >                for (Node<K,V> p; (p = advance()) != null; )
5725 >                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)p.key, p.val));
5726                  result = r;
5727                  CountedCompleter<?> c;
5728                  for (c = firstComplete(); c != null; c = c.nextComplete()) {
# Line 6995 | Line 5776 | public class ConcurrentHashMap<K,V>
5776              throw new Error(e);
5777          }
5778      }
6998
5779   }
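For context, the sketch below shows how the public bulk methods that construct and invoke the tasks diffed above are typically called (reduceValues drives ReduceValuesTask, reduceToLong drives MapReduceMappingsToLongTask, and so on). The signatures follow the final JDK 8 API, in which a leading parallelismThreshold argument chooses between sequential execution and parallel execution in the common pool; the exact spelling in this intermediate jsr166 revision may differ.

import java.util.concurrent.ConcurrentHashMap;

// Hypothetical usage of the bulk methods backed by the tasks in this diff.
public class BulkOpsDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> scores = new ConcurrentHashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 5);
        scores.put("carol", 8);

        // ReduceValuesTask: fold all values with a reducer. A threshold of
        // Long.MAX_VALUE forces sequential execution; 1 asks for maximal
        // parallelism in the common pool.
        Integer max = scores.reduceValues(1L, Integer::max);

        // MapReduceMappingsToLongTask: map each (key, value) pair to a long
        // and combine the results, starting from the basis 0L.
        long weighted = scores.reduceToLong(1L,
            (k, v) -> (long) k.length() * v,  // transformer
            0L,                               // basis
            Long::sum);                       // reducer

        System.out.println(max + " " + weighted);
    }
}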
