--- jsr166/src/jsr166e/ConcurrentHashMapV8.java 2012/07/07 13:01:53 1.50 +++ jsr166/src/jsr166e/ConcurrentHashMapV8.java 2012/10/30 16:46:09 1.73 @@ -5,7 +5,8 @@ */ package jsr166e; -import jsr166e.LongAdder; + +import java.util.Comparator; import java.util.Arrays; import java.util.Map; import java.util.Set; @@ -23,6 +24,8 @@ import java.util.concurrent.ConcurrentMa import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.locks.LockSupport; import java.util.concurrent.locks.AbstractQueuedSynchronizer; +import java.util.concurrent.atomic.AtomicReference; + import java.io.Serializable; /** @@ -41,19 +44,22 @@ import java.io.Serializable; * block, so may overlap with update operations (including {@code put} * and {@code remove}). Retrievals reflect the results of the most * recently completed update operations holding upon their - * onset. For aggregate operations such as {@code putAll} and {@code - * clear}, concurrent retrievals may reflect insertion or removal of - * only some entries. Similarly, Iterators and Enumerations return - * elements reflecting the state of the hash table at some point at or - * since the creation of the iterator/enumeration. They do - * not throw {@link ConcurrentModificationException}. - * However, iterators are designed to be used by only one thread at a - * time. Bear in mind that the results of aggregate status methods - * including {@code size}, {@code isEmpty}, and {@code containsValue} - * are typically useful only when a map is not undergoing concurrent - * updates in other threads. Otherwise the results of these methods - * reflect transient states that may be adequate for monitoring - * or estimation purposes, but not for program control. + * onset. (More formally, an update operation for a given key bears a + * happens-before relation with any (non-null) retrieval for + * that key reporting the updated value.) For aggregate operations + * such as {@code putAll} and {@code clear}, concurrent retrievals may + * reflect insertion or removal of only some entries. Similarly, + * Iterators and Enumerations return elements reflecting the state of + * the hash table at some point at or since the creation of the + * iterator/enumeration. They do not throw {@link + * ConcurrentModificationException}. However, iterators are designed + * to be used by only one thread at a time. Bear in mind that the + * results of aggregate status methods including {@code size}, {@code + * isEmpty}, and {@code containsValue} are typically useful only when + * a map is not undergoing concurrent updates in other threads. + * Otherwise the results of these methods reflect transient states + * that may be adequate for monitoring or estimation purposes, but not + * for program control. * *

The table is dynamically expanded when there are too many * collisions (i.e., keys that have distinct hash codes but fall into @@ -76,6 +82,19 @@ import java.io.Serializable; * {@code hashCode()} is a sure way to slow down performance of any * hash table. * + *

A {@link Set} projection of a ConcurrentHashMapV8 may be created + * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed + * (using {@link #keySet(Object)}) when only keys are of interest, and the + * mapped values are (perhaps transiently) not used or all take the + * same mapping value. + * + *
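For illustration, a minimal sketch of both forms; the variable names and element types below are assumptions for the example, not part of this class:

// Use a ConcurrentHashMapV8 purely as a concurrent Set; added keys map to Boolean.TRUE.
ConcurrentHashMapV8.KeySetView<String, Boolean> seen = ConcurrentHashMapV8.newKeySet();
seen.add("alpha");
boolean added = seen.add("beta");          // false if "beta" was already present

// View the keys of an existing map; add() inserts the key mapped to "present".
ConcurrentHashMapV8<String, String> tags = new ConcurrentHashMapV8<String, String>();
java.util.Set<String> tagNames = tags.keySet("present");
tagNames.add("urgent");                    // behaves like tags.putIfAbsent("urgent", "present")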

A ConcurrentHashMapV8 can be used as a scalable frequency map (a + * form of histogram or multiset) by using {@link LongAdder} values + * and initializing via {@link #computeIfAbsent}. For example, to add + * a count to a {@code ConcurrentHashMapV8<String,LongAdder> freqs}, you + * can use {@code freqs.computeIfAbsent(k, x -> new + * LongAdder()).increment();} + * + *
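Spelled out with this class's nested {@code Fun} interface instead of lambda syntax (the {@code freqs} and {@code word} names are illustrative, and jsr166e's LongAdder is assumed), a minimal sketch:

ConcurrentHashMapV8<String, LongAdder> freqs =
    new ConcurrentHashMapV8<String, LongAdder>();
String word = "example";
// Create the counter on first use, then increment it; computeIfAbsent returns
// the existing or newly computed LongAdder for the key.
freqs.computeIfAbsent(word, new ConcurrentHashMapV8.Fun<String, LongAdder>() {
    public LongAdder apply(String k) { return new LongAdder(); }
}).increment();
long count = freqs.get(word).sum();        // current count for "example"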

This class and its views and iterators implement all of the * optional methods of the {@link Map} and {@link Iterator} * interfaces. @@ -83,54 +102,121 @@ import java.io.Serializable; *

Like {@link Hashtable} but unlike {@link HashMap}, this class * does not allow {@code null} to be used as a key or value. * + *

ConcurrentHashMapV8s support parallel operations using the {@link + * ForkJoinPool#commonPool}. (Tasks that may be used in other contexts + * are available in class {@link ForkJoinTasks}). These operations are + * designed to be safely, and often sensibly, applied even with maps + * that are being concurrently updated by other threads; for example, + * when computing a snapshot summary of the values in a shared + * registry. There are three kinds of operation, each with four + * forms, accepting functions with Keys, Values, Entries, and (Key, + * Value) arguments and/or return values. Because the elements of a + * ConcurrentHashMapV8 are not ordered in any particular way, and may be + * processed in different orders in different parallel executions, the + * correctness of supplied functions should not depend on any + * ordering, or on any other objects or values that may transiently + * change while computation is in progress; and except for forEach + * actions, should ideally be side-effect-free. + * + *

+ * + *

The concurrency properties of bulk operations follow + * from those of ConcurrentHashMapV8: Any non-null result returned + * from {@code get(key)} and related access methods bears a + * happens-before relation with the associated insertion or + * update. The result of any bulk operation reflects the + * composition of these per-element relations (but is not + * necessarily atomic with respect to the map as a whole unless it + * is somehow known to be quiescent). Conversely, because keys + * and values in the map are never null, null serves as a reliable + * atomic indicator of the current lack of any result. To + * maintain this property, null serves as an implicit basis for + * all non-scalar reduction operations. For the double, long, and + * int versions, the basis should be one that, when combined with + * any other value, returns that other value (more formally, it + * should be the identity element for the reduction). Most common + * reductions have these properties; for example, computing a sum + * with basis 0 or a minimum with basis MAX_VALUE. + * + *
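For instance, a sum over {@code Long} values can use basis 0, the identity for addition; a minimal sketch (the {@code map} name and its Long value type are assumptions for the example):

long total = map.reduceValuesToLong(
    new ConcurrentHashMapV8.ObjectToLong<Long>() {
        public long apply(Long v) { return v.longValue(); }   // transform each value to a long
    },
    0L,                                                       // basis: identity for addition
    new ConcurrentHashMapV8.LongByLongToLong() {
        public long apply(long a, long b) { return a + b; }   // associative, commutative reducer
    });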

Search and transformation functions provided as arguments + * should similarly return null to indicate the lack of any result + * (in which case it is not used). In the case of mapped + * reductions, this also enables transformations to serve as + * filters, returning null (or, in the case of primitive + * specializations, the identity basis) if the element should not + * be combined. You can create compound transformations and + * filterings by composing them yourself under this "null means + * there is nothing there now" rule before using them in search or + * reduce operations. + * + *
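A minimal sketch of a transformation doubling as a filter, assuming a map named {@code map} with String values: values of ten characters or fewer are mapped to null and therefore skipped, and the surviving values are reduced to the longest one.

String longest = map.reduceValues(
    new ConcurrentHashMapV8.Fun<String, String>() {
        public String apply(String v) {
            return (v.length() > 10) ? v : null;              // null means "nothing there now"
        }
    },
    new ConcurrentHashMapV8.BiFun<String, String, String>() {
        public String apply(String a, String b) {
            return (a.length() >= b.length()) ? a : b;
        }
    });                                                       // null if nothing passed the filter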

Methods accepting and/or returning Entry arguments maintain + * key-value associations. They may be useful for example when + * finding the key for the greatest value. Note that "plain" Entry + * arguments can be supplied using {@code new + * AbstractMap.SimpleEntry(k,v)}. + * + *
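For example, a sketch of finding the key whose mapped value is greatest, assuming Long values and a map named {@code map} (both assumptions for illustration):

java.util.Map.Entry<String, Long> max = map.reduce(
    new ConcurrentHashMapV8.BiFun<String, Long, java.util.Map.Entry<String, Long>>() {
        public java.util.Map.Entry<String, Long> apply(String k, Long v) {
            return new java.util.AbstractMap.SimpleEntry<String, Long>(k, v);   // "plain" entry
        }
    },
    new ConcurrentHashMapV8.BiFun<java.util.Map.Entry<String, Long>,
                                  java.util.Map.Entry<String, Long>,
                                  java.util.Map.Entry<String, Long>>() {
        public java.util.Map.Entry<String, Long> apply(java.util.Map.Entry<String, Long> a,
                                                       java.util.Map.Entry<String, Long> b) {
            return (a.getValue() >= b.getValue()) ? a : b;    // keep the entry with the larger value
        }
    });
String keyOfMax = (max == null) ? null : max.getKey();        // null if the map was empty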

Bulk operations may complete abruptly, throwing an + * exception encountered in the application of a supplied + * function. Bear in mind when handling such exceptions that other + * concurrently executing functions could also have thrown + * exceptions, or would have done so if the first exception had + * not occurred. + * + *

Parallel speedups for bulk operations compared to sequential + * processing are common but not guaranteed. Operations involving + * brief functions on small maps may execute more slowly than + * sequential loops if the underlying work to parallelize the + * computation is more expensive than the computation + * itself. Similarly, parallelization may not lead to much actual + * parallelism if all processors are busy performing unrelated tasks. + * + *

All arguments to all task methods must be non-null. + * + *

jsr166e note: During transition, this class + * uses nested functional interfaces with different names but the + * same forms as those expected for JDK8. + * *

This class is a member of the * * Java Collections Framework. * - *

jsr166e note: This class is a candidate replacement for - * java.util.concurrent.ConcurrentHashMap. - * * @since 1.5 * @author Doug Lea * @param the type of keys maintained by this map * @param the type of mapped values */ public class ConcurrentHashMapV8 - implements ConcurrentMap, Serializable { + implements ConcurrentMap, Serializable { private static final long serialVersionUID = 7249069246763182397L; /** - * A function computing a mapping from the given key to a value. - * This is a place-holder for an upcoming JDK8 interface. - */ - public static interface MappingFunction { - /** - * Returns a value for the given key, or null if there is no mapping. - * - * @param key the (non-null) key - * @return a value for the key, or null if none - */ - V map(K key); - } - - /** - * A function computing a new mapping given a key and its current - * mapped value (or {@code null} if there is no current - * mapping). This is a place-holder for an upcoming JDK8 - * interface. - */ - public static interface RemappingFunction { - /** - * Returns a new value given a key and its current value. - * - * @param key the (non-null) key - * @param value the current value, or null if there is no mapping - * @return a value for the key, or null if none - */ - V remap(K key, V value); - } - - /** * A partitionable iterator. A Spliterator can be traversed * directly, but can also be partitioned (before traversal) by * creating another Spliterator that covers a non-overlapping @@ -150,13 +236,15 @@ public class ConcurrentHashMapV8 * *

      * {@code ConcurrentHashMapV8<String, Long> m = ...
-     * // Uses parallel depth of log2 of size / (parallelism * slack of 8).
-     * int depth = 32 - Integer.numberOfLeadingZeros(m.size() / (aForkJoinPool.getParallelism() * 8));
-     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), depth, null));
+     * // split as if have 8 * parallelism, for load balance
+     * int n = m.size();
+     * int p = aForkJoinPool.getParallelism() * 8;
+     * int split = (n < p)? n : p;
+     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), split, null));
      * // ...
      * static class SumValues extends RecursiveTask<Long> {
      *   final Spliterator<Long> s;
-     *   final int depth;             // number of splits before processing
+     *   final int split;             // split while > 1
      *   final SumValues nextJoin;    // records forked subtasks to join
-     *   SumValues(Spliterator<Long> s, int depth, SumValues nextJoin) {
-     *     this.s = s; this.depth = depth; this.nextJoin = nextJoin;
+     *   SumValues(Spliterator<Long> s, int split, SumValues nextJoin) {
+     *     this.s = s; this.split = split; this.nextJoin = nextJoin;
@@ -164,8 +252,8 @@ public class ConcurrentHashMapV8
      *   public Long compute() {
      *     long sum = 0;
      *     SumValues subtasks = null; // fork subtasks
-     *     for (int d = depth - 1; d >= 0; --d)
-     *       (subtasks = new SumValues(s.split(), d, subtasks)).fork();
+     *     for (int sp = split >>> 1; sp > 0; sp >>>= 1)
+     *       (subtasks = new SumValues(s.split(), sp, subtasks)).fork();
      *     while (s.hasNext())        // directly process remaining elements
      *       sum += s.next();
      *     for (SumValues t = subtasks; t != null; t = t.nextJoin)
@@ -197,6 +285,77 @@ public class ConcurrentHashMapV8
         Spliterator split();
     }
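A self-contained version of the SumValues example above, usable against this interface; it assumes the jsr166e backport's ForkJoinPool and RecursiveTask, and the helper class name is an illustrative choice:

import jsr166e.ConcurrentHashMapV8;
import jsr166e.ForkJoinPool;
import jsr166e.RecursiveTask;

class ParallelSum {                                  // hypothetical helper, not part of this class
    static long sumValues(ConcurrentHashMapV8<String, Long> m, ForkJoinPool pool) {
        int n = m.size();
        int p = pool.getParallelism() * 8;           // split as if 8 * parallelism, for load balance
        int split = (n < p) ? n : p;
        return pool.invoke(new SumValues(m.valueSpliterator(), split, null));
    }

    static class SumValues extends RecursiveTask<Long> {
        final ConcurrentHashMapV8.Spliterator<Long> s;
        final int split;                             // split while > 1
        final SumValues nextJoin;                    // records forked subtasks to join
        SumValues(ConcurrentHashMapV8.Spliterator<Long> s, int split, SumValues nextJoin) {
            this.s = s; this.split = split; this.nextJoin = nextJoin;
        }
        public Long compute() {
            long sum = 0;
            SumValues subtasks = null;               // fork subtasks
            for (int sp = split >>> 1; sp > 0; sp >>>= 1)
                (subtasks = new SumValues(s.split(), sp, subtasks)).fork();
            while (s.hasNext())                      // directly process remaining elements
                sum += s.next();
            for (SumValues t = subtasks; t != null; t = t.nextJoin)
                sum += t.join();                     // add in results of forked subtasks
            return sum;
        }
    }
}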
 
+    /**
+     * A view of a ConcurrentHashMapV8 as a {@link Set} of keys, in
+     * which additions may optionally be enabled by mapping to a
+     * common value.  This class cannot be directly instantiated. See
+     * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
+     * {@link #newKeySet(int)}.
+     *
+     * 

The view's {@code iterator} is a "weakly consistent" iterator + * that will never throw {@link ConcurrentModificationException}, + * and guarantees to traverse elements as they existed upon + * construction of the iterator, and may (but is not guaranteed to) + * reflect any modifications subsequent to construction. + */ + public static class KeySetView extends CHMView implements Set, java.io.Serializable { + private static final long serialVersionUID = 7249069246763182397L; + private final V value; + KeySetView(ConcurrentHashMapV8 map, V value) { // non-public + super(map); + this.value = value; + } + + /** + * Returns the map backing this view. + * + * @return the map backing this view + */ + public ConcurrentHashMapV8 getMap() { return map; } + + /** + * Returns the default mapped value for additions, + * or {@code null} if additions are not supported. + * + * @return the default mapped value for additions, or {@code null} + * if not supported. + */ + public V getMappedValue() { return value; } + + // implement Set API + + public boolean contains(Object o) { return map.containsKey(o); } + public boolean remove(Object o) { return map.remove(o) != null; } + public Iterator iterator() { return new KeyIterator(map); } + public boolean add(K e) { + V v; + if ((v = value) == null) + throw new UnsupportedOperationException(); + if (e == null) + throw new NullPointerException(); + return map.internalPutIfAbsent(e, v) == null; + } + public boolean addAll(Collection c) { + boolean added = false; + V v; + if ((v = value) == null) + throw new UnsupportedOperationException(); + for (K e : c) { + if (e == null) + throw new NullPointerException(); + if (map.internalPutIfAbsent(e, v) == null) + added = true; + } + return added; + } + public boolean equals(Object o) { + Set c; + return ((o instanceof Set) && + ((c = (Set)o) == this || + (containsAll(c) && c.containsAll(this)))); + } + } + /* * Overview: * @@ -348,7 +507,7 @@ public class ConcurrentHashMapV8 * When there are no lock acquisition failures, this is arranged * simply by proceeding from the last bin (table.length - 1) up * towards the first. Upon seeing a forwarding node, traversals - * (see class InternalIterator) arrange to move to the new table + * (see class Iter) arrange to move to the new table * without revisiting nodes. However, when any node is skipped * during a transfer, all earlier table bins may have become * visible, so are initialized with a reverse-forwarding node back @@ -358,7 +517,7 @@ public class ConcurrentHashMapV8 * mechanics trigger only when necessary. * * The traversal scheme also applies to partial traversals of - * ranges of bins (via an alternate InternalIterator constructor) + * ranges of bins (via an alternate Traverser constructor) * to support partitioned aggregate operations. Also, read-only * operations give up if ever forwarded to a null table, which * provides support for shutdown-style clearing, which is also not @@ -479,7 +638,7 @@ public class ConcurrentHashMapV8 private transient volatile int sizeCtl; // views - private transient KeySet keySet; + private transient KeySetView keySet; private transient Values values; private transient EntrySet entrySet; @@ -500,7 +659,7 @@ public class ConcurrentHashMapV8 * inline assignments below. 
*/ - static final Node tabAt(Node[] tab, int i) { // used by InternalIterator + static final Node tabAt(Node[] tab, int i) { // used by Iter return (Node)UNSAFE.getObjectVolatile(tab, ((long)i< * unlocking lock (via a failed CAS from non-waiting LOCKED * state), unlockers acquire the sync lock and perform a * notifyAll. + * + * The initial sanity check on tab and bounds is not currently + * necessary in the only usages of this method, but enables + * use in other future contexts. */ final void tryAwaitLock(Node[] tab, int i) { - if (tab != null && i >= 0 && i < tab.length) { // bounds check + if (tab != null && i >= 0 && i < tab.length) { // sanity check int r = ThreadLocalRandom.current().nextInt(); // randomize spins int spins = MAX_SPINS, h; while (tabAt(tab, i) == this && ((h = hash) & LOCKED) != 0) { @@ -648,7 +811,7 @@ public class ConcurrentHashMapV8 * TreeBins also maintain a separate locking discipline than * regular bins. Because they are forwarded via special MOVED * nodes at bin heads (which can never change once established), - * we cannot use use those nodes as locks. Instead, TreeBin + * we cannot use those nodes as locks. Instead, TreeBin * extends AbstractQueuedSynchronizer to support a simple form of * read-write lock. For update operations and table validation, * the exclusive form of lock behaves in the same way as bin-head @@ -733,11 +896,11 @@ public class ConcurrentHashMapV8 } /** - * Return the TreeNode (or null if not found) for the given key + * Returns the TreeNode (or null if not found) for the given key * starting at given root. */ - @SuppressWarnings("unchecked") // suppress Comparable cast warning - final TreeNode getTreeNode(int h, Object k, TreeNode p) { + @SuppressWarnings("unchecked") final TreeNode getTreeNode + (int h, Object k, TreeNode p) { Class c = k.getClass(); while (p != null) { int dir, ph; Object pk; Class pc; @@ -797,8 +960,8 @@ public class ConcurrentHashMapV8 * Finds or adds a node. * @return null if added */ - @SuppressWarnings("unchecked") // suppress Comparable cast warning - final TreeNode putTreeNode(int h, Object k, Object v) { + @SuppressWarnings("unchecked") final TreeNode putTreeNode + (int h, Object k, Object v) { Class c = k.getClass(); TreeNode pp = root, p = null; int dir = 0; @@ -1225,7 +1388,7 @@ public class ConcurrentHashMapV8 } /* - * Internal versions of the five insertion methods, each a + * Internal versions of the six insertion methods, each a * little more complicated than the last. All have * the same basic structure as the first (internalPut): * 1. If table uninitialized, create @@ -1243,6 +1406,8 @@ public class ConcurrentHashMapV8 * returns from function call. * * compute uses the same function-call mechanics, but without * the prescans + * * merge acts as putIfAbsent in the absent case, but invokes the + * update function if present * * putAll attempts to pre-allocate enough table space * and more lazily performs count updates and checks. 
* @@ -1439,7 +1604,7 @@ public class ConcurrentHashMapV8 /** Implementation for computeIfAbsent */ private final Object internalComputeIfAbsent(K k, - MappingFunction mf) { + Fun mf) { int h = spread(k.hashCode()); Object val = null; int count = 0; @@ -1452,7 +1617,7 @@ public class ConcurrentHashMapV8 if (casTabAt(tab, i, null, node)) { count = 1; try { - if ((val = mf.map(k)) != null) + if ((val = mf.apply(k)) != null) node.val = val; } finally { if (val == null) @@ -1477,7 +1642,7 @@ public class ConcurrentHashMapV8 TreeNode p = t.getTreeNode(h, k, t.root); if (p != null) val = p.val; - else if ((val = mf.map(k)) != null) { + else if ((val = mf.apply(k)) != null) { added = true; count = 2; t.putTreeNode(h, k, val); @@ -1531,7 +1696,7 @@ public class ConcurrentHashMapV8 } Node last = e; if ((e = e.next) == null) { - if ((val = mf.map(k)) != null) { + if ((val = mf.apply(k)) != null) { added = true; last.next = new Node(h, k, val, null); if (count >= TREE_THRESHOLD) @@ -1566,9 +1731,8 @@ public class ConcurrentHashMapV8 } /** Implementation for compute */ - @SuppressWarnings("unchecked") - private final Object internalCompute(K k, - RemappingFunction mf) { + @SuppressWarnings("unchecked") private final Object internalCompute + (K k, boolean onlyIfPresent, BiFun mf) { int h = spread(k.hashCode()); Object val = null; int delta = 0; @@ -1578,11 +1742,13 @@ public class ConcurrentHashMapV8 if (tab == null) tab = initTable(); else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) { + if (onlyIfPresent) + break; Node node = new Node(fh = h | LOCKED, k, null, null); if (casTabAt(tab, i, null, node)) { try { count = 1; - if ((val = mf.remap(k, null)) != null) { + if ((val = mf.apply(k, null)) != null) { node.val = val; delta = 1; } @@ -1607,7 +1773,7 @@ public class ConcurrentHashMapV8 count = 1; TreeNode p = t.getTreeNode(h, k, t.root); Object pv = (p == null) ? null : p.val; - if ((val = mf.remap(k, (V)pv)) != null) { + if ((val = mf.apply(k, (V)pv)) != null) { if (p != null) p.val = val; else { @@ -1643,7 +1809,7 @@ public class ConcurrentHashMapV8 if ((e.hash & HASH_BITS) == h && (ev = e.val) != null && ((ek = e.key) == k || k.equals(ek))) { - val = mf.remap(k, (V)ev); + val = mf.apply(k, (V)ev); if (val != null) e.val = val; else { @@ -1658,7 +1824,7 @@ public class ConcurrentHashMapV8 } pred = e; if ((e = e.next) == null) { - if ((val = mf.remap(k, null)) != null) { + if (!onlyIfPresent && (val = mf.apply(k, null)) != null) { pred.next = new Node(h, k, val, null); delta = 1; if (count >= TREE_THRESHOLD) @@ -1689,6 +1855,114 @@ public class ConcurrentHashMapV8 return val; } + /** Implementation for merge */ + @SuppressWarnings("unchecked") private final Object internalMerge + (K k, V v, BiFun mf) { + int h = spread(k.hashCode()); + Object val = null; + int delta = 0; + int count = 0; + for (Node[] tab = table;;) { + int i; Node f; int fh; Object fk, fv; + if (tab == null) + tab = initTable(); + else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) { + if (casTabAt(tab, i, null, new Node(h, k, v, null))) { + delta = 1; + val = v; + break; + } + } + else if ((fh = f.hash) == MOVED) { + if ((fk = f.key) instanceof TreeBin) { + TreeBin t = (TreeBin)fk; + t.acquire(0); + try { + if (tabAt(tab, i) == f) { + count = 1; + TreeNode p = t.getTreeNode(h, k, t.root); + val = (p == null) ? 
v : mf.apply((V)p.val, v); + if (val != null) { + if (p != null) + p.val = val; + else { + count = 2; + delta = 1; + t.putTreeNode(h, k, val); + } + } + else if (p != null) { + delta = -1; + t.deleteTreeNode(p); + } + } + } finally { + t.release(0); + } + if (count != 0) + break; + } + else + tab = (Node[])fk; + } + else if ((fh & LOCKED) != 0) { + checkForResize(); + f.tryAwaitLock(tab, i); + } + else if (f.casHash(fh, fh | LOCKED)) { + try { + if (tabAt(tab, i) == f) { + count = 1; + for (Node e = f, pred = null;; ++count) { + Object ek, ev; + if ((e.hash & HASH_BITS) == h && + (ev = e.val) != null && + ((ek = e.key) == k || k.equals(ek))) { + val = mf.apply(v, (V)ev); + if (val != null) + e.val = val; + else { + delta = -1; + Node en = e.next; + if (pred != null) + pred.next = en; + else + setTabAt(tab, i, en); + } + break; + } + pred = e; + if ((e = e.next) == null) { + val = v; + pred.next = new Node(h, k, val, null); + delta = 1; + if (count >= TREE_THRESHOLD) + replaceWithTreeBin(tab, i, k); + break; + } + } + } + } finally { + if (!f.casHash(fh | LOCKED, fh)) { + f.hash = fh; + synchronized (f) { f.notifyAll(); }; + } + } + if (count != 0) { + if (tab.length <= 64) + count = 2; + break; + } + } + } + if (delta != 0) { + counter.add((long)delta); + if (count > 1) + checkForResize(); + } + return val; + } + /** Implementation for putAll */ private final void internalPutAll(Map m) { tryPresize(m.size()); @@ -1913,7 +2187,7 @@ public class ConcurrentHashMapV8 for (int i = bin;;) { // start upwards sweep int fh; Node f; if ((f = tabAt(tab, i)) == null) { - if (bin >= 0) { // no lock needed (or available) + if (bin >= 0) { // Unbuffered; no lock needed (or available) if (!casTabAt(tab, i, f, fwd)) continue; } @@ -2089,8 +2363,10 @@ public class ConcurrentHashMapV8 try { if (tabAt(tab, i) == f) { for (Node p = t.first; p != null; p = p.next) { - p.val = null; - --delta; + if (p.val != null) { // (currently always true) + p.val = null; + --delta; + } } t.first = null; t.root = null; @@ -2112,8 +2388,10 @@ public class ConcurrentHashMapV8 try { if (tabAt(tab, i) == f) { for (Node e = f; e != null; e = e.next) { - e.val = null; - --delta; + if (e.val != null) { // (currently always true) + e.val = null; + --delta; + } } setTabAt(tab, i, null); ++i; @@ -2134,7 +2412,7 @@ public class ConcurrentHashMapV8 /** * Encapsulates traversal for methods such as containsValue; also - * serves as a base class for other iterators. + * serves as a base class for other iterators and bulk tasks. * * At each step, the iterator snapshots the key ("nextKey") and * value ("nextVal") of a valid node (i.e., one that, at point of @@ -2142,7 +2420,8 @@ public class ConcurrentHashMapV8 * change (including to null, indicating deletion), field nextVal * might not be accurate at point of use, but still maintains the * weak consistency property of holding a value that was once - * valid. + * valid. To support iterator.remove, the nextKey field is not + * updated (nulled out) when the iterator cannot advance. * * Internal traversals directly access these fields, as in: * {@code while (it.advance() != null) { process(it.nextKey); }} @@ -2168,34 +2447,42 @@ public class ConcurrentHashMapV8 * paranoically cope with potential sharing by users of iterators * across threads, iteration terminates if a bounds checks fails * for a table read. + * + * This class extends ForkJoinTask to streamline parallel + * iteration in bulk operations (see BulkTask). 
This adds only an + * int of space overhead, which is close enough to negligible in + * cases where it is not needed to not worry about it. Because + * ForkJoinTask is Serializable, but iterators need not be, we + * need to add warning suppressions. */ - static class InternalIterator { + @SuppressWarnings("serial") static class Traverser extends ForkJoinTask { final ConcurrentHashMapV8 map; Node next; // the next entry to use - Node last; // the last entry used Object nextKey; // cached key field of next Object nextVal; // cached val field of next Node[] tab; // current table; updated if resized int index; // index of bin to use next int baseIndex; // current index of initial table int baseLimit; // index bound for initial table - final int baseSize; // initial table size + int baseSize; // initial table size /** Creates iterator for all entries in the table. */ - InternalIterator(ConcurrentHashMapV8 map) { - this.tab = (this.map = map).table; - baseLimit = baseSize = (tab == null) ? 0 : tab.length; + Traverser(ConcurrentHashMapV8 map) { + this.map = map; } - /** Creates iterator for clone() and split() methods. */ - InternalIterator(InternalIterator it, boolean split) { - this.map = it.map; - this.tab = it.tab; + /** Creates iterator for split() methods */ + Traverser(Traverser it) { + ConcurrentHashMapV8 m; Node[] t; + if ((m = this.map = it.map) == null) + t = null; + else if ((t = it.tab) == null && // force parent tab initialization + (t = it.tab = m.table) != null) + it.baseLimit = it.baseSize = t.length; + this.tab = t; this.baseSize = it.baseSize; - int lo = it.baseIndex; - int hi = this.baseLimit = it.baseLimit; - this.index = this.baseIndex = - (split) ? (it.baseLimit = (lo + hi + 1) >>> 1) : lo; + it.baseLimit = this.index = this.baseIndex = + ((this.baseLimit = it.baseLimit) + it.baseIndex + 1) >>> 1; } /** @@ -2203,17 +2490,24 @@ public class ConcurrentHashMapV8 * See above for explanation. 
*/ final Object advance() { - Node e = last = next; + Node e = next; Object ev = null; outer: do { if (e != null) // advance past used/skipped node e = e.next; while (e == null) { // get to next non-null bin + ConcurrentHashMapV8 m; Node[] t; int b, i, n; Object ek; // checks must use locals - if ((b = baseIndex) >= baseLimit || (i = index) < 0 || - (t = tab) == null || i >= (n = t.length)) + if ((t = tab) != null) + n = t.length; + else if ((m = map) != null && (t = tab = m.table) != null) + n = baseLimit = baseSize = t.length; + else break outer; - else if ((e = tabAt(t, i)) != null && e.hash == MOVED) { + if ((b = baseIndex) >= baseLimit || + (i = index) < 0 || i >= n) + break outer; + if ((e = tabAt(t, i)) != null && e.hash == MOVED) { if ((ek = e.key) instanceof TreeBin) e = ((TreeBin)ek).first; else { @@ -2230,13 +2524,10 @@ public class ConcurrentHashMapV8 } public final void remove() { - if (nextVal == null) - advance(); - Node e = last; - if (e == null) + Object k = nextKey; + if (k == null && (advance() == null || (k = nextKey) == null)) throw new IllegalStateException(); - last = null; - map.remove(e.key); + map.internalReplace(k, null, null); } public final boolean hasNext() { @@ -2244,6 +2535,9 @@ public class ConcurrentHashMapV8 } public final boolean hasMoreElements() { return hasNext(); } + public final void setRawResult(Object x) { } + public R getRawResult() { return null; } + public boolean exec() { return true; } } /* ---------------- Public operations -------------- */ @@ -2337,6 +2631,32 @@ public class ConcurrentHashMapV8 } /** + * Creates a new {@link Set} backed by a ConcurrentHashMapV8 + * from the given type to {@code Boolean.TRUE}. + * + * @return the new set + */ + public static KeySetView newKeySet() { + return new KeySetView(new ConcurrentHashMapV8(), + Boolean.TRUE); + } + + /** + * Creates a new {@link Set} backed by a ConcurrentHashMapV8 + * from the given type to {@code Boolean.TRUE}. + * + * @param initialCapacity The implementation performs internal + * sizing to accommodate this many elements. + * @throws IllegalArgumentException if the initial capacity of + * elements is negative + * @return the new set + */ + public static KeySetView newKeySet(int initialCapacity) { + return new KeySetView(new ConcurrentHashMapV8(initialCapacity), + Boolean.TRUE); + } + + /** * {@inheritDoc} */ public boolean isEmpty() { @@ -2353,9 +2673,18 @@ public class ConcurrentHashMapV8 (int)n); } - final long longSize() { // accurate version of size needed for views + /** + * Returns the number of mappings. This method should be used + * instead of {@link #size} because a ConcurrentHashMapV8 may + * contain more mappings than can be represented as an int. The + * value returned is a snapshot; the actual count may differ if + * there are ongoing concurrent insertions or removals. + * + * @return the number of mappings + */ + public long mappingCount() { long n = counter.sum(); - return (n < 0L) ? 0L : n; + return (n < 0L) ? 0L : n; // ignore transient negative values } /** @@ -2369,14 +2698,30 @@ public class ConcurrentHashMapV8 * * @throws NullPointerException if the specified key is null */ - @SuppressWarnings("unchecked") - public V get(Object key) { + @SuppressWarnings("unchecked") public V get(Object key) { if (key == null) throw new NullPointerException(); return (V)internalGet(key); } /** + * Returns the value to which the specified key is mapped, + * or the given defaultValue if this map contains no mapping for the key. 
+ * + * @param key the key + * @param defaultValue the value to return if this map contains + * no mapping for the given key + * @return the mapping for the key, if present; else the defaultValue + * @throws NullPointerException if the specified key is null + */ + @SuppressWarnings("unchecked") public V getValueOrDefault(Object key, V defaultValue) { + if (key == null) + throw new NullPointerException(); + V v = (V) internalGet(key); + return v == null ? defaultValue : v; + } + + /** * Tests if the specified object is a key in this table. * * @param key possible key @@ -2405,7 +2750,7 @@ public class ConcurrentHashMapV8 if (value == null) throw new NullPointerException(); Object v; - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); while ((v = it.advance()) != null) { if (v == value || value.equals(v)) return true; @@ -2445,8 +2790,7 @@ public class ConcurrentHashMapV8 * {@code null} if there was no mapping for {@code key} * @throws NullPointerException if the specified key or value is null */ - @SuppressWarnings("unchecked") - public V put(K key, V value) { + @SuppressWarnings("unchecked") public V put(K key, V value) { if (key == null || value == null) throw new NullPointerException(); return (V)internalPut(key, value); @@ -2459,8 +2803,7 @@ public class ConcurrentHashMapV8 * or {@code null} if there was no mapping for the key * @throws NullPointerException if the specified key or value is null */ - @SuppressWarnings("unchecked") - public V putIfAbsent(K key, V value) { + @SuppressWarnings("unchecked") public V putIfAbsent(K key, V value) { if (key == null || value == null) throw new NullPointerException(); return (V)internalPutIfAbsent(key, value); @@ -2484,7 +2827,7 @@ public class ConcurrentHashMapV8 *

 {@code
      * if (map.containsKey(key))
      *   return map.get(key);
-     * value = mappingFunction.map(key);
+     * value = mappingFunction.apply(key);
      * if (value != null)
      *   map.put(key, value);
      * return value;}
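Concretely, a minimal sketch of this idiom with String keys and BigInteger values; the {@code cache} name and the {@code expensiveCompute} helper are hypothetical placeholders:

final ConcurrentHashMapV8<String, java.math.BigInteger> cache =
    new ConcurrentHashMapV8<String, java.math.BigInteger>();

java.math.BigInteger result = cache.computeIfAbsent("some-key",
    new ConcurrentHashMapV8.Fun<String, java.math.BigInteger>() {
        public java.math.BigInteger apply(String k) {
            return expensiveCompute(k);   // hypothetical helper; invoked only if the key is absent
        }
    });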
@@ -2501,13 +2844,13 @@ public class ConcurrentHashMapV8 * memoized result, as in: * *
 {@code
-     * map.computeIfAbsent(key, new MappingFunction() {
+     * map.computeIfAbsent(key, new Fun<K, V>() {
      *   public V map(K k) { return new Value(f(k)); }});}
* * @param key key with which the specified value is to be associated * @param mappingFunction the function to compute a value * @return the current (existing or computed) value associated with - * the specified key, or null if the computed value is null. + * the specified key, or null if the computed value is null * @throws NullPointerException if the specified key or mappingFunction * is null * @throws IllegalStateException if the computation detectably @@ -2516,19 +2859,60 @@ public class ConcurrentHashMapV8 * @throws RuntimeException or Error if the mappingFunction does so, * in which case the mapping is left unestablished */ - @SuppressWarnings("unchecked") - public V computeIfAbsent(K key, MappingFunction mappingFunction) { + @SuppressWarnings("unchecked") public V computeIfAbsent + (K key, Fun mappingFunction) { if (key == null || mappingFunction == null) throw new NullPointerException(); return (V)internalComputeIfAbsent(key, mappingFunction); } /** + * If the given key is present, computes a new mapping value given a key and + * its current mapped value. This is equivalent to + *
 {@code
+     *   if (map.containsKey(key)) {
+     *     value = remappingFunction.apply(key, map.get(key));
+     *     if (value != null)
+     *       map.put(key, value);
+     *     else
+     *       map.remove(key);
+     *   }
+     * }
+ * + * except that the action is performed atomically. If the + * function returns {@code null}, the mapping is removed. If the + * function itself throws an (unchecked) exception, the exception + * is rethrown to its caller, and the current mapping is left + * unchanged. Some attempted update operations on this map by + * other threads may be blocked while computation is in progress, + * so the computation should be short and simple, and must not + * attempt to update any other mappings of this Map. For example, + * to either create or append new messages to a value mapping: + * + * @param key key with which the specified value is to be associated + * @param remappingFunction the function to compute a value + * @return the new value associated with the specified key, or null if none + * @throws NullPointerException if the specified key or remappingFunction + * is null + * @throws IllegalStateException if the computation detectably + * attempts a recursive update to this map that would + * otherwise never complete + * @throws RuntimeException or Error if the remappingFunction does so, + * in which case the mapping is unchanged + */ + @SuppressWarnings("unchecked") public V computeIfPresent + (K key, BiFun remappingFunction) { + if (key == null || remappingFunction == null) + throw new NullPointerException(); + return (V)internalCompute(key, true, remappingFunction); + } + + /** * Computes a new mapping value given a key and * its current mapped value (or {@code null} if there is no current * mapping). This is equivalent to *
 {@code
-     *   value = remappingFunction.remap(key, map.get(key));
+     *   value = remappingFunction.apply(key, map.get(key));
      *   if (value != null)
      *     map.put(key, value);
      *   else
@@ -2548,14 +2932,13 @@ public class ConcurrentHashMapV8
      * 
 {@code
      * Map<Key, String> map = ...;
      * final String msg = ...;
-     * map.compute(key, new RemappingFunction() {
-     *   public String remap(Key k, String v) {
+     * map.compute(key, new BiFun<Key, String, String>() {
+     *   public String apply(Key k, String v) {
      *    return (v == null) ? msg : v + msg;});}}
* * @param key key with which the specified value is to be associated * @param remappingFunction the function to compute a value - * @return the new value associated with - * the specified key, or null if none. + * @return the new value associated with the specified key, or null if none * @throws NullPointerException if the specified key or remappingFunction * is null * @throws IllegalStateException if the computation detectably @@ -2564,11 +2947,43 @@ public class ConcurrentHashMapV8 * @throws RuntimeException or Error if the remappingFunction does so, * in which case the mapping is unchanged */ - @SuppressWarnings("unchecked") - public V compute(K key, RemappingFunction remappingFunction) { + @SuppressWarnings("unchecked") public V compute + (K key, BiFun remappingFunction) { if (key == null || remappingFunction == null) throw new NullPointerException(); - return (V)internalCompute(key, remappingFunction); + return (V)internalCompute(key, false, remappingFunction); + } + + /** + * If the specified key is not already associated + * with a value, associate it with the given value. + * Otherwise, replace the value with the results of + * the given remapping function. This is equivalent to: + *
 {@code
+     *   if (!map.containsKey(key))
+     *     map.put(key, value);
+     *   else {
+     *     newValue = remappingFunction.apply(map.get(key), value);
+     *     if (newValue != null)
+     *       map.put(key, newValue);
+     *     else
+     *       map.remove(key);
+     *   }
+     * }
+ * except that the action is performed atomically. If the + * function returns {@code null}, the mapping is removed. If the + * function itself throws an (unchecked) exception, the exception + * is rethrown to its caller, and the current mapping is left + * unchanged. Some attempted update operations on this map by + * other threads may be blocked while computation is in progress, + * so the computation should be short and simple, and must not + * attempt to update any other mappings of this Map. + */ + @SuppressWarnings("unchecked") public V merge + (K key, V value, BiFun remappingFunction) { + if (key == null || value == null || remappingFunction == null) + throw new NullPointerException(); + return (V)internalMerge(key, value, remappingFunction); } /** @@ -2580,8 +2995,7 @@ public class ConcurrentHashMapV8 * {@code null} if there was no mapping for {@code key} * @throws NullPointerException if the specified key is null */ - @SuppressWarnings("unchecked") - public V remove(Object key) { + @SuppressWarnings("unchecked") public V remove(Object key) { if (key == null) throw new NullPointerException(); return (V)internalReplace(key, null, null); @@ -2618,8 +3032,7 @@ public class ConcurrentHashMapV8 * or {@code null} if there was no mapping for the key * @throws NullPointerException if the specified key or value is null */ - @SuppressWarnings("unchecked") - public V replace(K key, V value) { + @SuppressWarnings("unchecked") public V replace(K key, V value) { if (key == null || value == null) throw new NullPointerException(); return (V)internalReplace(key, value, null); @@ -2635,22 +3048,31 @@ public class ConcurrentHashMapV8 /** * Returns a {@link Set} view of the keys contained in this map. * The set is backed by the map, so changes to the map are - * reflected in the set, and vice-versa. The set supports element - * removal, which removes the corresponding mapping from this map, - * via the {@code Iterator.remove}, {@code Set.remove}, - * {@code removeAll}, {@code retainAll}, and {@code clear} - * operations. It does not support the {@code add} or - * {@code addAll} operations. + * reflected in the set, and vice-versa. * - *

The view's {@code iterator} is a "weakly consistent" iterator - * that will never throw {@link ConcurrentModificationException}, - * and guarantees to traverse elements as they existed upon - * construction of the iterator, and may (but is not guaranteed to) - * reflect any modifications subsequent to construction. + * @return the set view + */ + public KeySetView keySet() { + KeySetView ks = keySet; + return (ks != null) ? ks : (keySet = new KeySetView(this, null)); + } + + /** + * Returns a {@link Set} view of the keys in this map, using the + * given common mapped value for any additions (i.e., {@link + * Collection#add} and {@link Collection#addAll}). This is of + * course only appropriate if it is acceptable to use the same + * value for all additions from this view. + * + * @param mappedValue the mapped value to use for any + * additions. + * @return the set view + * @throws NullPointerException if the mappedValue is null */ - public Set keySet() { - KeySet ks = keySet; - return (ks != null) ? ks : (keySet = new KeySet(this)); + public KeySetView keySet(V mappedValue) { + if (mappedValue == null) + throw new NullPointerException(); + return new KeySetView(this, mappedValue); } /** @@ -2716,27 +3138,27 @@ public class ConcurrentHashMapV8 } /** - * Returns a partionable iterator of the keys in this map. + * Returns a partitionable iterator of the keys in this map. * - * @return a partionable iterator of the keys in this map + * @return a partitionable iterator of the keys in this map */ public Spliterator keySpliterator() { return new KeyIterator(this); } /** - * Returns a partionable iterator of the values in this map. + * Returns a partitionable iterator of the values in this map. * - * @return a partionable iterator of the values in this map + * @return a partitionable iterator of the values in this map */ public Spliterator valueSpliterator() { return new ValueIterator(this); } /** - * Returns a partionable iterator of the entries in this map. + * Returns a partitionable iterator of the entries in this map. 
* - * @return a partionable iterator of the entries in this map + * @return a partitionable iterator of the entries in this map */ public Spliterator> entrySpliterator() { return new EntryIterator(this); @@ -2751,7 +3173,7 @@ public class ConcurrentHashMapV8 */ public int hashCode() { int h = 0; - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); Object v; while ((v = it.advance()) != null) { h += it.nextKey.hashCode() ^ v.hashCode(); @@ -2771,7 +3193,7 @@ public class ConcurrentHashMapV8 * @return a string representation of this map */ public String toString() { - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); StringBuilder sb = new StringBuilder(); sb.append('{'); Object v; @@ -2804,7 +3226,7 @@ public class ConcurrentHashMapV8 if (!(o instanceof Map)) return false; Map m = (Map) o; - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); Object val; while ((val = it.advance()) != null) { Object v = m.get(it.nextKey); @@ -2825,25 +3247,18 @@ public class ConcurrentHashMapV8 /* ----------------Iterators -------------- */ - static final class KeyIterator extends InternalIterator + @SuppressWarnings("serial") static final class KeyIterator extends Traverser implements Spliterator, Enumeration { KeyIterator(ConcurrentHashMapV8 map) { super(map); } - KeyIterator(InternalIterator it, boolean split) { - super(it, split); + KeyIterator(Traverser it) { + super(it); } public KeyIterator split() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new KeyIterator(this, true); - } - public KeyIterator clone() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new KeyIterator(this, false); + return new KeyIterator(this); } - - @SuppressWarnings("unchecked") - public final K next() { + @SuppressWarnings("unchecked") public final K next() { if (nextVal == null && advance() == null) throw new NoSuchElementException(); Object k = nextKey; @@ -2854,26 +3269,19 @@ public class ConcurrentHashMapV8 public final K nextElement() { return next(); } } - static final class ValueIterator extends InternalIterator + @SuppressWarnings("serial") static final class ValueIterator extends Traverser implements Spliterator, Enumeration { ValueIterator(ConcurrentHashMapV8 map) { super(map); } - ValueIterator(InternalIterator it, boolean split) { - super(it, split); + ValueIterator(Traverser it) { + super(it); } public ValueIterator split() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new ValueIterator(this, true); - } - - public ValueIterator clone() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new ValueIterator(this, false); + return new ValueIterator(this); } - @SuppressWarnings("unchecked") - public final V next() { + @SuppressWarnings("unchecked") public final V next() { Object v; if ((v = nextVal) == null && (v = advance()) == null) throw new NoSuchElementException(); @@ -2884,25 +3292,19 @@ public class ConcurrentHashMapV8 public final V nextElement() { return next(); } } - static final class EntryIterator extends InternalIterator + @SuppressWarnings("serial") static final class EntryIterator extends Traverser implements Spliterator> { EntryIterator(ConcurrentHashMapV8 map) { super(map); } - EntryIterator(InternalIterator it, boolean split) { - 
super(it, split); + EntryIterator(Traverser it) { + super(it); } public EntryIterator split() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new EntryIterator(this, true); - } - public EntryIterator clone() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new EntryIterator(this, false); + return new EntryIterator(this); } - @SuppressWarnings("unchecked") - public final Map.Entry next() { + @SuppressWarnings("unchecked") public final Map.Entry next() { Object v; if ((v = nextVal) == null && (v = advance()) == null) throw new NoSuchElementException(); @@ -2960,9 +3362,9 @@ public class ConcurrentHashMapV8 /** * Base class for views. */ - static abstract class MapView { + static abstract class CHMView { final ConcurrentHashMapV8 map; - MapView(ConcurrentHashMapV8 map) { this.map = map; } + CHMView(ConcurrentHashMapV8 map) { this.map = map; } public final int size() { return map.size(); } public final boolean isEmpty() { return map.isEmpty(); } public final void clear() { map.clear(); } @@ -2975,7 +3377,7 @@ public class ConcurrentHashMapV8 private static final String oomeMsg = "Required array size too large"; public final Object[] toArray() { - long sz = map.longSize(); + long sz = map.mappingCount(); if (sz > (long)(MAX_ARRAY_SIZE)) throw new OutOfMemoryError(oomeMsg); int n = (int)sz; @@ -2997,9 +3399,8 @@ public class ConcurrentHashMapV8 return (i == n) ? r : Arrays.copyOf(r, i); } - @SuppressWarnings("unchecked") - public final T[] toArray(T[] a) { - long sz = map.longSize(); + @SuppressWarnings("unchecked") public final T[] toArray(T[] a) { + long sz = map.mappingCount(); if (sz > (long)(MAX_ARRAY_SIZE)) throw new OutOfMemoryError(oomeMsg); int m = (int)sz; @@ -3086,28 +3487,7 @@ public class ConcurrentHashMapV8 } - static final class KeySet extends MapView implements Set { - KeySet(ConcurrentHashMapV8 map) { super(map); } - public final boolean contains(Object o) { return map.containsKey(o); } - public final boolean remove(Object o) { return map.remove(o) != null; } - public final Iterator iterator() { - return new KeyIterator(map); - } - public final boolean add(K e) { - throw new UnsupportedOperationException(); - } - public final boolean addAll(Collection c) { - throw new UnsupportedOperationException(); - } - public boolean equals(Object o) { - Set c; - return ((o instanceof Set) && - ((c = (Set)o) == this || - (containsAll(c) && c.containsAll(this)))); - } - } - - static final class Values extends MapView + static final class Values extends CHMView implements Collection { Values(ConcurrentHashMapV8 map) { super(map); } public final boolean contains(Object o) { return map.containsValue(o); } @@ -3132,9 +3512,10 @@ public class ConcurrentHashMapV8 public final boolean addAll(Collection c) { throw new UnsupportedOperationException(); } + } - static final class EntrySet extends MapView + static final class EntrySet extends CHMView implements Set> { EntrySet(ConcurrentHashMapV8 map) { super(map); } public final boolean contains(Object o) { @@ -3190,9 +3571,8 @@ public class ConcurrentHashMapV8 * for each key-value mapping, followed by a null pair. * The key-value mappings are emitted in no particular order. 
*/ - @SuppressWarnings("unchecked") - private void writeObject(java.io.ObjectOutputStream s) - throws java.io.IOException { + @SuppressWarnings("unchecked") private void writeObject(java.io.ObjectOutputStream s) + throws java.io.IOException { if (segments == null) { // for serialization compatibility segments = (Segment[]) new Segment[DEFAULT_CONCURRENCY_LEVEL]; @@ -3200,7 +3580,7 @@ public class ConcurrentHashMapV8 segments[i] = new Segment(LOAD_FACTOR); } s.defaultWriteObject(); - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); Object v; while ((v = it.advance()) != null) { s.writeObject(it.nextKey); @@ -3215,9 +3595,8 @@ public class ConcurrentHashMapV8 * Reconstitutes the instance from a stream (that is, deserializes it). * @param s the stream */ - @SuppressWarnings("unchecked") - private void readObject(java.io.ObjectInputStream s) - throws java.io.IOException, ClassNotFoundException { + @SuppressWarnings("unchecked") private void readObject(java.io.ObjectInputStream s) + throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); this.segments = null; // unneeded // initialize transient final field @@ -3294,6 +3673,3091 @@ public class ConcurrentHashMapV8 } } + + // ------------------------------------------------------- + + // Sams + /** Interface describing a void action of one argument */ + public interface Action { void apply(A a); } + /** Interface describing a void action of two arguments */ + public interface BiAction { void apply(A a, B b); } + /** Interface describing a function of one argument */ + public interface Fun { T apply(A a); } + /** Interface describing a function of two arguments */ + public interface BiFun { T apply(A a, B b); } + /** Interface describing a function of no arguments */ + public interface Generator { T apply(); } + /** Interface describing a function mapping its argument to a double */ + public interface ObjectToDouble { double apply(A a); } + /** Interface describing a function mapping its argument to a long */ + public interface ObjectToLong { long apply(A a); } + /** Interface describing a function mapping its argument to an int */ + public interface ObjectToInt {int apply(A a); } + /** Interface describing a function mapping two arguments to a double */ + public interface ObjectByObjectToDouble { double apply(A a, B b); } + /** Interface describing a function mapping two arguments to a long */ + public interface ObjectByObjectToLong { long apply(A a, B b); } + /** Interface describing a function mapping two arguments to an int */ + public interface ObjectByObjectToInt {int apply(A a, B b); } + /** Interface describing a function mapping a double to a double */ + public interface DoubleToDouble { double apply(double a); } + /** Interface describing a function mapping a long to a long */ + public interface LongToLong { long apply(long a); } + /** Interface describing a function mapping an int to an int */ + public interface IntToInt { int apply(int a); } + /** Interface describing a function mapping two doubles to a double */ + public interface DoubleByDoubleToDouble { double apply(double a, double b); } + /** Interface describing a function mapping two longs to a long */ + public interface LongByLongToLong { long apply(long a, long b); } + /** Interface describing a function mapping two ints to an int */ + public interface IntByIntToInt { int apply(int a, int b); } + + + // ------------------------------------------------------- + + /** + * Performs the given action for each (key, value). 
+ * + * @param action the action + */ + public void forEach(BiAction action) { + ForkJoinTasks.forEach + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each (key, value). + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case the action is not applied). + * @param action the action + */ + public void forEach(BiFun transformer, + Action action) { + ForkJoinTasks.forEach + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each (key, value), or null if none. Upon + * success, further element processing is suppressed and the + * results of any other parallel invocations of the search + * function are ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each (key, value), or null if none + */ + public U search(BiFun searchFunction) { + return ForkJoinTasks.search + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, or null if none. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public U reduce(BiFun transformer, + BiFun reducer) { + return ForkJoinTasks.reduce + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public double reduceToDouble(ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public long reduceToLong(ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. 
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public int reduceToInt(ObjectByObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceToInt + (this, transformer, basis, reducer).invoke(); + } + + /** + * Performs the given action for each key. + * + * @param action the action + */ + public void forEachKey(Action action) { + ForkJoinTasks.forEachKey + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each key. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case the action is not applied). + * @param action the action + */ + public void forEachKey(Fun transformer, + Action action) { + ForkJoinTasks.forEachKey + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each key, or null if none. Upon success, + * further element processing is suppressed and the results of + * any other parallel invocations of the search function are + * ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each key, or null if none + */ + public U searchKeys(Fun searchFunction) { + return ForkJoinTasks.searchKeys + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating all keys using the given + * reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all keys using the given + * reducer to combine values, or null if none + */ + public K reduceKeys(BiFun reducer) { + return ForkJoinTasks.reduceKeys + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, or + * null if none. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + public U reduceKeys(Fun transformer, + BiFun reducer) { + return ForkJoinTasks.reduceKeys + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. 
+ *
+ * @param transformer a function returning the transformation
+ * for an element
+ * @param basis the identity (initial default value) for the reduction
+ * @param reducer a commutative associative combining function
+ * @return the result of accumulating the given transformation
+ * of all keys
+ */
+ public double reduceKeysToDouble(ObjectToDouble transformer,
+ double basis,
+ DoubleByDoubleToDouble reducer) {
+ return ForkJoinTasks.reduceKeysToDouble
+ (this, transformer, basis, reducer).invoke();
+ }
+
+ /**
+ * Returns the result of accumulating the given transformation
+ * of all keys using the given reducer to combine values, and
+ * the given basis as an identity value.
+ *
+ * @param transformer a function returning the transformation
+ * for an element
+ * @param basis the identity (initial default value) for the reduction
+ * @param reducer a commutative associative combining function
+ * @return the result of accumulating the given transformation
+ * of all keys
+ */
+ public long reduceKeysToLong(ObjectToLong transformer,
+ long basis,
+ LongByLongToLong reducer) {
+ return ForkJoinTasks.reduceKeysToLong
+ (this, transformer, basis, reducer).invoke();
+ }
+
+ /**
+ * Returns the result of accumulating the given transformation
+ * of all keys using the given reducer to combine values, and
+ * the given basis as an identity value.
+ *
+ * @param transformer a function returning the transformation
+ * for an element
+ * @param basis the identity (initial default value) for the reduction
+ * @param reducer a commutative associative combining function
+ * @return the result of accumulating the given transformation
+ * of all keys
+ */
+ public int reduceKeysToInt(ObjectToInt transformer,
+ int basis,
+ IntByIntToInt reducer) {
+ return ForkJoinTasks.reduceKeysToInt
+ (this, transformer, basis, reducer).invoke();
+ }
+
+ /**
+ * Performs the given action for each value.
+ *
+ * @param action the action
+ */
+ public void forEachValue(Action action) {
+ ForkJoinTasks.forEachValue
+ (this, action).invoke();
+ }
+
+ /**
+ * Performs the given action for each non-null transformation
+ * of each value.
+ *
+ * @param transformer a function returning the transformation
+ * for an element, or null if there is no transformation (in
+ * which case the action is not applied).
+ * @param action the action
+ */
+ public void forEachValue(Fun transformer,
+ Action action) {
+ ForkJoinTasks.forEachValue
+ (this, transformer, action).invoke();
+ }
+
+ /**
+ * Returns a non-null result from applying the given search
+ * function on each value, or null if none. Upon success,
+ * further element processing is suppressed and the results of
+ * any other parallel invocations of the search function are
+ * ignored.
+ *
+ * @param searchFunction a function returning a non-null
+ * result on success, else null
+ * @return a non-null result from applying the given search
+ * function on each value, or null if none
+ */
+ public U searchValues(Fun searchFunction) {
+ return ForkJoinTasks.searchValues
+ (this, searchFunction).invoke();
+ }
+
+ /**
+ * Returns the result of accumulating all values using the
+ * given reducer to combine values, or null if none.
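+ *
+ * <p>A hypothetical sketch, assuming {@code String} values and an
+ * arbitrary instance named {@code map}: the longest value can be
+ * found with
+ * <pre> {@code
+ * String longest =
+ *     map.reduceValues((a, b) -> a.length() >= b.length() ? a : b);}</pre>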
+ * + * @param reducer a commutative associative combining function + * @return the result of accumulating all values + */ + public V reduceValues(BiFun reducer) { + return ForkJoinTasks.reduceValues + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, or + * null if none. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public U reduceValues(Fun transformer, + BiFun reducer) { + return ForkJoinTasks.reduceValues + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public double reduceValuesToDouble(ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceValuesToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public long reduceValuesToLong(ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceValuesToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public int reduceValuesToInt(ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceValuesToInt + (this, transformer, basis, reducer).invoke(); + } + + /** + * Performs the given action for each entry. + * + * @param action the action + */ + public void forEachEntry(Action> action) { + ForkJoinTasks.forEachEntry + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each entry. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case the action is not applied). + * @param action the action + */ + public void forEachEntry(Fun, ? 
extends U> transformer, + Action action) { + ForkJoinTasks.forEachEntry + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each entry, or null if none. Upon success, + * further element processing is suppressed and the results of + * any other parallel invocations of the search function are + * ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each entry, or null if none + */ + public U searchEntries(Fun, ? extends U> searchFunction) { + return ForkJoinTasks.searchEntries + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating all entries using the + * given reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all entries + */ + public Map.Entry reduceEntries(BiFun, Map.Entry, ? extends Map.Entry> reducer) { + return ForkJoinTasks.reduceEntries + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * or null if none. + * + * @param transformer a function returning the transformation + * for an element, or null of there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public U reduceEntries(Fun, ? extends U> transformer, + BiFun reducer) { + return ForkJoinTasks.reduceEntries + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public double reduceEntriesToDouble(ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceEntriesToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public long reduceEntriesToLong(ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceEntriesToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. 
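+ *
+ * <p>A hypothetical sketch ({@code map} names an arbitrary
+ * instance): the total length of all keys, rendered as strings, is
+ * <pre> {@code
+ * int keyChars = map.reduceEntriesToInt(
+ *     e -> e.getKey().toString().length(), 0, (a, b) -> a + b);}</pre>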
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public int reduceEntriesToInt(ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceEntriesToInt + (this, transformer, basis, reducer).invoke(); + } + + // --------------------------------------------------------------------- + + /** + * Predefined tasks for performing bulk parallel operations on + * ConcurrentHashMapV8s. These tasks follow the forms and rules used + * for bulk operations. Each method has the same name, but returns + * a task rather than invoking it. These methods may be useful in + * custom applications such as submitting a task without waiting + * for completion, using a custom pool, or combining with other + * tasks. + */ + public static class ForkJoinTasks { + private ForkJoinTasks() {} + + /** + * Returns a task that when invoked, performs the given + * action for each (key, value) + * + * @param map the map + * @param action the action + * @return the task + */ + public static ForkJoinTask forEach + (ConcurrentHashMapV8 map, + BiAction action) { + if (action == null) throw new NullPointerException(); + return new ForEachMappingTask(map, null, -1, null, action); + } + + /** + * Returns a task that when invoked, performs the given + * action for each non-null transformation of each (key, value) + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + * @return the task + */ + public static ForkJoinTask forEach + (ConcurrentHashMapV8 map, + BiFun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedMappingTask + (map, null, -1, null, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each (key, + * value), or null if none. Upon success, further element + * processing is suppressed and the results of any other + * parallel invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask search + (ConcurrentHashMapV8 map, + BiFun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchMappingsTask + (map, null, -1, null, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined). 
+ * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduce + (ConcurrentHashMapV8 map, + BiFun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToDouble + (ConcurrentHashMapV8 map, + ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToLong + (ConcurrentHashMapV8 map, + ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToInt + (ConcurrentHashMapV8 map, + ObjectByObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, performs the given action + * for each key. + * + * @param map the map + * @param action the action + * @return the task + */ + public static ForkJoinTask forEachKey + (ConcurrentHashMapV8 map, + Action action) { + if (action == null) throw new NullPointerException(); + return new ForEachKeyTask(map, null, -1, null, action); + } + + /** + * Returns a task that when invoked, performs the given action + * for each non-null transformation of each key. 
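+ *
+ * <p>An illustrative sketch ({@code pool} is an assumed
+ * {@code ForkJoinPool} supplied by the caller): because the returned
+ * task has not yet been invoked, it can be run in a pool of one's
+ * choosing:
+ * <pre> {@code
+ * ForkJoinTask<Void> task = ForkJoinTasks.forEachKey
+ *     (map, k -> k.toString(), s -> System.out.println(s));
+ * pool.invoke(task);}</pre>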
+ * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + * @return the task + */ + public static ForkJoinTask forEachKey + (ConcurrentHashMapV8 map, + Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedKeyTask + (map, null, -1, null, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each key, or + * null if none. Upon success, further element processing is + * suppressed and the results of any other parallel + * invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask searchKeys + (ConcurrentHashMapV8 map, + Fun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchKeysTask + (map, null, -1, null, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating all keys using the given reducer to combine + * values, or null if none. + * + * @param map the map + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeys + (ConcurrentHashMapV8 map, + BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + return new ReduceKeysTask + (map, null, -1, null, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeys + (ConcurrentHashMapV8 map, + Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. 
+ * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToLong + (ConcurrentHashMapV8 map, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToInt + (ConcurrentHashMapV8 map, + ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, performs the given action + * for each value. + * + * @param map the map + * @param action the action + */ + public static ForkJoinTask forEachValue + (ConcurrentHashMapV8 map, + Action action) { + if (action == null) throw new NullPointerException(); + return new ForEachValueTask(map, null, -1, null, action); + } + + /** + * Returns a task that when invoked, performs the given action + * for each non-null transformation of each value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public static ForkJoinTask forEachValue + (ConcurrentHashMapV8 map, + Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedValueTask + (map, null, -1, null, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each value, or + * null if none. Upon success, further element processing is + * suppressed and the results of any other parallel + * invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask searchValues + (ConcurrentHashMapV8 map, + Fun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchValuesTask + (map, null, -1, null, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating all values using the given reducer to combine + * values, or null if none. 
+ * + * @param map the map + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValues + (ConcurrentHashMapV8 map, + BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + return new ReduceValuesTask + (map, null, -1, null, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined). + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValues + (ConcurrentHashMapV8 map, + Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValuesToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValuesToLong + (ConcurrentHashMapV8 map, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. 
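+ *
+ * <p>A hypothetical sketch ({@code pool} is an assumed
+ * {@code ForkJoinPool}): the task can be submitted now and joined
+ * later, overlapping the reduction with other work:
+ * <pre> {@code
+ * ForkJoinTask<Integer> size = ForkJoinTasks.reduceValuesToInt
+ *     (map, v -> 1, 0, (a, b) -> a + b);
+ * pool.execute(size);
+ * // ... other work ...
+ * int n = size.join();}</pre>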
+ *
+ * @param map the map
+ * @param transformer a function returning the transformation
+ * for an element
+ * @param basis the identity (initial default value) for the reduction
+ * @param reducer a commutative associative combining function
+ * @return the task
+ */
+ public static ForkJoinTask reduceValuesToInt
+ (ConcurrentHashMapV8 map,
+ ObjectToInt transformer,
+ int basis,
+ IntByIntToInt reducer) {
+ if (transformer == null || reducer == null)
+ throw new NullPointerException();
+ return new MapReduceValuesToIntTask
+ (map, null, -1, null, transformer, basis, reducer);
+ }
+
+ /**
+ * Returns a task that when invoked, performs the given action
+ * for each entry.
+ *
+ * @param map the map
+ * @param action the action
+ * @return the task
+ */
+ public static ForkJoinTask forEachEntry
+ (ConcurrentHashMapV8 map,
+ Action> action) {
+ if (action == null) throw new NullPointerException();
+ return new ForEachEntryTask(map, null, -1, null, action);
+ }
+
+ /**
+ * Returns a task that when invoked, performs the given action
+ * for each non-null transformation of each entry.
+ *
+ * @param map the map
+ * @param transformer a function returning the transformation
+ * for an element, or null if there is no transformation (in
+ * which case the action is not applied)
+ * @param action the action
+ * @return the task
+ */
+ public static ForkJoinTask forEachEntry
+ (ConcurrentHashMapV8 map,
+ Fun, ? extends U> transformer,
+ Action action) {
+ if (transformer == null || action == null)
+ throw new NullPointerException();
+ return new ForEachTransformedEntryTask
+ (map, null, -1, null, transformer, action);
+ }
+
+ /**
+ * Returns a task that when invoked, returns a non-null result
+ * from applying the given search function on each entry, or
+ * null if none. Upon success, further element processing is
+ * suppressed and the results of any other parallel
+ * invocations of the search function are ignored.
+ *
+ * @param map the map
+ * @param searchFunction a function returning a non-null
+ * result on success, else null
+ * @return the task
+ */
+ public static ForkJoinTask searchEntries
+ (ConcurrentHashMapV8 map,
+ Fun, ? extends U> searchFunction) {
+ if (searchFunction == null) throw new NullPointerException();
+ return new SearchEntriesTask
+ (map, null, -1, null, searchFunction,
+ new AtomicReference());
+ }
+
+ /**
+ * Returns a task that when invoked, returns the result of
+ * accumulating all entries using the given reducer to combine
+ * values, or null if none.
+ *
+ * @param map the map
+ * @param reducer a commutative associative combining function
+ * @return the task
+ */
+ public static ForkJoinTask> reduceEntries
+ (ConcurrentHashMapV8 map,
+ BiFun, Map.Entry, ? extends Map.Entry> reducer) {
+ if (reducer == null) throw new NullPointerException();
+ return new ReduceEntriesTask
+ (map, null, -1, null, reducer);
+ }
+
+ /**
+ * Returns a task that when invoked, returns the result of
+ * accumulating the given transformation of all entries using the
+ * given reducer to combine values, or null if none.
+ *
+ * @param map the map
+ * @param transformer a function returning the transformation
+ * for an element, or null if there is no transformation (in
+ * which case it is not combined).
+ * @param reducer a commutative associative combining function
+ * @return the task
+ */
+ public static ForkJoinTask reduceEntries
+ (ConcurrentHashMapV8 map,
+ Fun, ?
extends U> transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToLong + (ConcurrentHashMapV8 map, + ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToInt + (ConcurrentHashMapV8 map, + ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + } + + // ------------------------------------------------------- + + /** + * Base for FJ tasks for bulk operations. This adds a variant of + * CountedCompleters and some split and merge bookkeeping to + * iterator functionality. The forEach and reduce methods are + * similar to those illustrated in CountedCompleter documentation, + * except that bottom-up reduction completions perform them within + * their compute methods. The search methods are like forEach + * except they continually poll for success and exit early. Also, + * exceptions are handled in a simpler manner, by just trying to + * complete root task exceptionally. 
+ */ + @SuppressWarnings("serial") static abstract class BulkTask extends Traverser { + final BulkTask parent; // completion target + int batch; // split control; -1 for unknown + int pending; // completion control + + BulkTask(ConcurrentHashMapV8 map, BulkTask parent, + int batch) { + super(map); + this.parent = parent; + this.batch = batch; + if (parent != null && map != null) { // split parent + Node[] t; + if ((t = parent.tab) == null && + (t = parent.tab = map.table) != null) + parent.baseLimit = parent.baseSize = t.length; + this.tab = t; + this.baseSize = parent.baseSize; + int hi = this.baseLimit = parent.baseLimit; + parent.baseLimit = this.index = this.baseIndex = + (hi + parent.baseIndex + 1) >>> 1; + } + } + + /** + * Forces root task to complete. + * @param ex if null, complete normally, else exceptionally + * @return false to simplify use + */ + final boolean tryCompleteComputation(Throwable ex) { + for (BulkTask a = this;;) { + BulkTask p = a.parent; + if (p == null) { + if (ex != null) + a.completeExceptionally(ex); + else + a.quietlyComplete(); + return false; + } + a = p; + } + } + + /** + * Version of tryCompleteComputation for function screening checks + */ + final boolean abortOnNullFunction() { + return tryCompleteComputation(new Error("Unexpected null function")); + } + + // utilities + + /** CompareAndSet pending count */ + final boolean casPending(int cmp, int val) { + return U.compareAndSwapInt(this, PENDING, cmp, val); + } + + /** + * Returns approx exp2 of the number of times (minus one) to + * split task by two before executing leaf action. This value + * is faster to compute and more convenient to use as a guide + * to splitting than is the depth, since it is used while + * dividing by two anyway. + */ + final int batch() { + ConcurrentHashMapV8 m; int b; Node[] t; ForkJoinPool pool; + if ((b = batch) < 0 && (m = map) != null) { // force initialization + if ((t = tab) == null && (t = tab = m.table) != null) + baseLimit = baseSize = t.length; + if (t != null) { + long n = m.counter.sum(); + int par = ((pool = getPool()) == null) ? + ForkJoinPool.getCommonPoolParallelism() : + pool.getParallelism(); + int sp = par << 3; // slack of 8 + b = batch = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp; + } + } + return b; + } + + /** + * Returns exportable snapshot entry. + */ + static AbstractMap.SimpleEntry entryFor(K k, V v) { + return new AbstractMap.SimpleEntry(k, v); + } + + // Unsafe mechanics + private static final sun.misc.Unsafe U; + private static final long PENDING; + static { + try { + U = getUnsafe(); + PENDING = U.objectFieldOffset + (BulkTask.class.getDeclaredField("pending")); + } catch (Exception e) { + throw new Error(e); + } + } + } + + /** + * Base class for non-reductive actions + */ + @SuppressWarnings("serial") static abstract class BulkAction extends BulkTask { + BulkAction nextTask; + BulkAction(ConcurrentHashMapV8 map, BulkTask parent, + int batch, BulkAction nextTask) { + super(map, parent, batch); + this.nextTask = nextTask; + } + + /** + * Try to complete task and upward parents. Upon hitting + * non-completed parent, if a non-FJ task, try to help out the + * computation. 
+ */ + final void tryComplete(BulkAction subtasks) { + BulkTask a = this, s = a; + for (int c;;) { + if ((c = a.pending) == 0) { + if ((a = (s = a).parent) == null) { + s.quietlyComplete(); + break; + } + } + else if (a.casPending(c, c - 1)) { + if (subtasks != null && !inForkJoinPool()) { + while ((s = a.parent) != null) + a = s; + while (!a.isDone()) { + BulkAction next = subtasks.nextTask; + if (subtasks.tryUnfork()) + subtasks.exec(); + if ((subtasks = next) == null) + break; + } + } + break; + } + } + } + + } + + /* + * Task classes. Coded in a regular but ugly format/style to + * simplify checks that each variant differs in the right way from + * others. + */ + + @SuppressWarnings("serial") static final class ForEachKeyTask + extends BulkAction { + final Action action; + ForEachKeyTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachKeyTask nextTask, + Action action) { + super(m, p, b, nextTask); + this.action = action; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Action action = this.action; + if (action == null) + return abortOnNullFunction(); + ForEachKeyTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachKeyTask + (map, this, b >>>= 1, subtasks, action)).fork(); + } + while (advance() != null) + action.apply((K)nextKey); + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachValueTask + extends BulkAction { + final Action action; + ForEachValueTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachValueTask nextTask, + Action action) { + super(m, p, b, nextTask); + this.action = action; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Action action = this.action; + if (action == null) + return abortOnNullFunction(); + ForEachValueTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachValueTask + (map, this, b >>>= 1, subtasks, action)).fork(); + } + Object v; + while ((v = advance()) != null) + action.apply((V)v); + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachEntryTask + extends BulkAction { + final Action> action; + ForEachEntryTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachEntryTask nextTask, + Action> action) { + super(m, p, b, nextTask); + this.action = action; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Action> action = this.action; + if (action == null) + return abortOnNullFunction(); + ForEachEntryTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachEntryTask + (map, this, b >>>= 1, subtasks, action)).fork(); + } + Object v; + while ((v = advance()) != null) + action.apply(entryFor((K)nextKey, (V)v)); + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachMappingTask + extends BulkAction { + final BiAction action; + ForEachMappingTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachMappingTask nextTask, + BiAction action) { + super(m, p, b, nextTask); + 
this.action = action; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiAction action = this.action; + if (action == null) + return abortOnNullFunction(); + ForEachMappingTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachMappingTask + (map, this, b >>>= 1, subtasks, action)).fork(); + } + Object v; + while ((v = advance()) != null) + action.apply((K)nextKey, (V)v); + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedKeyTask + extends BulkAction { + final Fun transformer; + final Action action; + ForEachTransformedKeyTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachTransformedKeyTask nextTask, + Fun transformer, + Action action) { + super(m, p, b, nextTask); + this.transformer = transformer; + this.action = action; + + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun transformer = + this.transformer; + final Action action = this.action; + if (transformer == null || action == null) + return abortOnNullFunction(); + ForEachTransformedKeyTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachTransformedKeyTask + (map, this, b >>>= 1, subtasks, transformer, action)).fork(); + } + U u; + while (advance() != null) { + if ((u = transformer.apply((K)nextKey)) != null) + action.apply(u); + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedValueTask + extends BulkAction { + final Fun transformer; + final Action action; + ForEachTransformedValueTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachTransformedValueTask nextTask, + Fun transformer, + Action action) { + super(m, p, b, nextTask); + this.transformer = transformer; + this.action = action; + + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun transformer = + this.transformer; + final Action action = this.action; + if (transformer == null || action == null) + return abortOnNullFunction(); + ForEachTransformedValueTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachTransformedValueTask + (map, this, b >>>= 1, subtasks, transformer, action)).fork(); + } + Object v; U u; + while ((v = advance()) != null) { + if ((u = transformer.apply((V)v)) != null) + action.apply(u); + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedEntryTask + extends BulkAction { + final Fun, ? extends U> transformer; + final Action action; + ForEachTransformedEntryTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachTransformedEntryTask nextTask, + Fun, ? extends U> transformer, + Action action) { + super(m, p, b, nextTask); + this.transformer = transformer; + this.action = action; + + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun, ? 
extends U> transformer = + this.transformer; + final Action action = this.action; + if (transformer == null || action == null) + return abortOnNullFunction(); + ForEachTransformedEntryTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachTransformedEntryTask + (map, this, b >>>= 1, subtasks, transformer, action)).fork(); + } + Object v; U u; + while ((v = advance()) != null) { + if ((u = transformer.apply(entryFor((K)nextKey, (V)v))) != null) + action.apply(u); + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedMappingTask + extends BulkAction { + final BiFun transformer; + final Action action; + ForEachTransformedMappingTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ForEachTransformedMappingTask nextTask, + BiFun transformer, + Action action) { + super(m, p, b, nextTask); + this.transformer = transformer; + this.action = action; + + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiFun transformer = + this.transformer; + final Action action = this.action; + if (transformer == null || action == null) + return abortOnNullFunction(); + ForEachTransformedMappingTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new ForEachTransformedMappingTask + (map, this, b >>>= 1, subtasks, transformer, action)).fork(); + } + Object v; U u; + while ((v = advance()) != null) { + if ((u = transformer.apply((K)nextKey, (V)v)) != null) + action.apply(u); + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + } + + @SuppressWarnings("serial") static final class SearchKeysTask + extends BulkAction { + final Fun searchFunction; + final AtomicReference result; + SearchKeysTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + SearchKeysTask nextTask, + Fun searchFunction, + AtomicReference result) { + super(m, p, b, nextTask); + this.searchFunction = searchFunction; this.result = result; + } + @SuppressWarnings("unchecked") public final boolean exec() { + AtomicReference result = this.result; + final Fun searchFunction = + this.searchFunction; + if (searchFunction == null || result == null) + return abortOnNullFunction(); + SearchKeysTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit && result.get() == null) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new SearchKeysTask + (map, this, b >>>= 1, subtasks, searchFunction, result)).fork(); + } + U u; + while (result.get() == null && advance() != null) { + if ((u = searchFunction.apply((K)nextKey)) != null) { + if (result.compareAndSet(null, u)) + tryCompleteComputation(null); + break; + } + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + public final U getRawResult() { return result.get(); } + } + + @SuppressWarnings("serial") static final class SearchValuesTask + extends BulkAction { + final Fun searchFunction; + final AtomicReference result; + SearchValuesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + SearchValuesTask nextTask, + Fun searchFunction, + AtomicReference result) { + super(m, p, b, nextTask); + this.searchFunction = searchFunction; this.result = result; + } + 
@SuppressWarnings("unchecked") public final boolean exec() { + AtomicReference result = this.result; + final Fun searchFunction = + this.searchFunction; + if (searchFunction == null || result == null) + return abortOnNullFunction(); + SearchValuesTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit && result.get() == null) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new SearchValuesTask + (map, this, b >>>= 1, subtasks, searchFunction, result)).fork(); + } + Object v; U u; + while (result.get() == null && (v = advance()) != null) { + if ((u = searchFunction.apply((V)v)) != null) { + if (result.compareAndSet(null, u)) + tryCompleteComputation(null); + break; + } + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + public final U getRawResult() { return result.get(); } + } + + @SuppressWarnings("serial") static final class SearchEntriesTask + extends BulkAction { + final Fun, ? extends U> searchFunction; + final AtomicReference result; + SearchEntriesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + SearchEntriesTask nextTask, + Fun, ? extends U> searchFunction, + AtomicReference result) { + super(m, p, b, nextTask); + this.searchFunction = searchFunction; this.result = result; + } + @SuppressWarnings("unchecked") public final boolean exec() { + AtomicReference result = this.result; + final Fun, ? extends U> searchFunction = + this.searchFunction; + if (searchFunction == null || result == null) + return abortOnNullFunction(); + SearchEntriesTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit && result.get() == null) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new SearchEntriesTask + (map, this, b >>>= 1, subtasks, searchFunction, result)).fork(); + } + Object v; U u; + while (result.get() == null && (v = advance()) != null) { + if ((u = searchFunction.apply(entryFor((K)nextKey, (V)v))) != null) { + if (result.compareAndSet(null, u)) + tryCompleteComputation(null); + break; + } + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + public final U getRawResult() { return result.get(); } + } + + @SuppressWarnings("serial") static final class SearchMappingsTask + extends BulkAction { + final BiFun searchFunction; + final AtomicReference result; + SearchMappingsTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + SearchMappingsTask nextTask, + BiFun searchFunction, + AtomicReference result) { + super(m, p, b, nextTask); + this.searchFunction = searchFunction; this.result = result; + } + @SuppressWarnings("unchecked") public final boolean exec() { + AtomicReference result = this.result; + final BiFun searchFunction = + this.searchFunction; + if (searchFunction == null || result == null) + return abortOnNullFunction(); + SearchMappingsTask subtasks = null; + try { + int b = batch(), c; + while (b > 1 && baseIndex != baseLimit && result.get() == null) { + do {} while (!casPending(c = pending, c+1)); + (subtasks = new SearchMappingsTask + (map, this, b >>>= 1, subtasks, searchFunction, result)).fork(); + } + Object v; U u; + while (result.get() == null && (v = advance()) != null) { + if ((u = searchFunction.apply((K)nextKey, (V)v)) != null) { + if (result.compareAndSet(null, u)) + tryCompleteComputation(null); + break; + } + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + tryComplete(subtasks); + return false; + } + public final U 
getRawResult() { return result.get(); } + } + + @SuppressWarnings("serial") static final class ReduceKeysTask + extends BulkTask { + final BiFun reducer; + K result; + ReduceKeysTask rights, nextRight; + ReduceKeysTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ReduceKeysTask nextRight, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiFun reducer = + this.reducer; + if (reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new ReduceKeysTask + (map, this, b >>>= 1, rights, reducer)).fork(); + } + K r = null; + while (advance() != null) { + K u = (K)nextKey; + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (ReduceKeysTask t = this, s;;) { + int c; BulkTask par; K tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof ReduceKeysTask)) { + t.quietlyComplete(); + break; + } + t = (ReduceKeysTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + ReduceKeysTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final K getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class ReduceValuesTask + extends BulkTask { + final BiFun reducer; + V result; + ReduceValuesTask rights, nextRight; + ReduceValuesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ReduceValuesTask nextRight, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiFun reducer = + this.reducer; + if (reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new ReduceValuesTask + (map, this, b >>>= 1, rights, reducer)).fork(); + } + V r = null; + Object v; + while ((v = advance()) != null) { + V u = (V)v; + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (ReduceValuesTask t = this, s;;) { + int c; BulkTask par; V tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof ReduceValuesTask)) { + t.quietlyComplete(); + break; + } + t = (ReduceValuesTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + ReduceValuesTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final V getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class ReduceEntriesTask + extends BulkTask> { + final BiFun, Map.Entry, ? 
extends Map.Entry> reducer; + Map.Entry result; + ReduceEntriesTask rights, nextRight; + ReduceEntriesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + ReduceEntriesTask nextRight, + BiFun, Map.Entry, ? extends Map.Entry> reducer) { + super(m, p, b); this.nextRight = nextRight; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiFun, Map.Entry, ? extends Map.Entry> reducer = + this.reducer; + if (reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new ReduceEntriesTask + (map, this, b >>>= 1, rights, reducer)).fork(); + } + Map.Entry r = null; + Object v; + while ((v = advance()) != null) { + Map.Entry u = entryFor((K)nextKey, (V)v); + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (ReduceEntriesTask t = this, s;;) { + int c; BulkTask par; Map.Entry tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof ReduceEntriesTask)) { + t.quietlyComplete(); + break; + } + t = (ReduceEntriesTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + ReduceEntriesTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Map.Entry getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceKeysTask + extends BulkTask { + final Fun transformer; + final BiFun reducer; + U result; + MapReduceKeysTask rights, nextRight; + MapReduceKeysTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceKeysTask nextRight, + Fun transformer, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun transformer = + this.transformer; + final BiFun reducer = + this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceKeysTask + (map, this, b >>>= 1, rights, transformer, reducer)).fork(); + } + U r = null, u; + while (advance() != null) { + if ((u = transformer.apply((K)nextKey)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (MapReduceKeysTask t = this, s;;) { + int c; BulkTask par; U tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? 
sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceKeysTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceKeysTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceKeysTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final U getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceValuesTask + extends BulkTask { + final Fun transformer; + final BiFun reducer; + U result; + MapReduceValuesTask rights, nextRight; + MapReduceValuesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceValuesTask nextRight, + Fun transformer, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun transformer = + this.transformer; + final BiFun reducer = + this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceValuesTask + (map, this, b >>>= 1, rights, transformer, reducer)).fork(); + } + U r = null, u; + Object v; + while ((v = advance()) != null) { + if ((u = transformer.apply((V)v)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (MapReduceValuesTask t = this, s;;) { + int c; BulkTask par; U tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceValuesTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceValuesTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceValuesTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final U getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceEntriesTask + extends BulkTask { + final Fun, ? extends U> transformer; + final BiFun reducer; + U result; + MapReduceEntriesTask rights, nextRight; + MapReduceEntriesTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceEntriesTask nextRight, + Fun, ? extends U> transformer, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final Fun, ? extends U> transformer = + this.transformer; + final BiFun reducer = + this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceEntriesTask + (map, this, b >>>= 1, rights, transformer, reducer)).fork(); + } + U r = null, u; + Object v; + while ((v = advance()) != null) { + if ((u = transformer.apply(entryFor((K)nextKey, (V)v))) != null) + r = (r == null) ? 
u : reducer.apply(r, u); + } + result = r; + for (MapReduceEntriesTask t = this, s;;) { + int c; BulkTask par; U tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceEntriesTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceEntriesTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceEntriesTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final U getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceMappingsTask + extends BulkTask { + final BiFun transformer; + final BiFun reducer; + U result; + MapReduceMappingsTask rights, nextRight; + MapReduceMappingsTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceMappingsTask nextRight, + BiFun transformer, + BiFun reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final BiFun transformer = + this.transformer; + final BiFun reducer = + this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceMappingsTask + (map, this, b >>>= 1, rights, transformer, reducer)).fork(); + } + U r = null, u; + Object v; + while ((v = advance()) != null) { + if ((u = transformer.apply((K)nextKey, (V)v)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + result = r; + for (MapReduceMappingsTask t = this, s;;) { + int c; BulkTask par; U tr, sr; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + if ((sr = s.result) != null) + t.result = ((tr = t.result) == null) ? 
sr : reducer.apply(tr, sr); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceMappingsTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceMappingsTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceMappingsTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final U getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask + extends BulkTask { + final ObjectToDouble transformer; + final DoubleByDoubleToDouble reducer; + final double basis; + double result; + MapReduceKeysToDoubleTask rights, nextRight; + MapReduceKeysToDoubleTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceKeysToDoubleTask nextRight, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToDouble transformer = + this.transformer; + final DoubleByDoubleToDouble reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final double id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceKeysToDoubleTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + double r = id; + while (advance() != null) + r = reducer.apply(r, transformer.apply((K)nextKey)); + result = r; + for (MapReduceKeysToDoubleTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceKeysToDoubleTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceKeysToDoubleTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceKeysToDoubleTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Double getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask + extends BulkTask { + final ObjectToDouble transformer; + final DoubleByDoubleToDouble reducer; + final double basis; + double result; + MapReduceValuesToDoubleTask rights, nextRight; + MapReduceValuesToDoubleTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceValuesToDoubleTask nextRight, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToDouble transformer = + this.transformer; + final DoubleByDoubleToDouble reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final double id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceValuesToDoubleTask + (map, this, b >>>= 1, 
rights, transformer, id, reducer)).fork(); + } + double r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply((V)v)); + result = r; + for (MapReduceValuesToDoubleTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceValuesToDoubleTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceValuesToDoubleTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceValuesToDoubleTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Double getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask + extends BulkTask { + final ObjectToDouble> transformer; + final DoubleByDoubleToDouble reducer; + final double basis; + double result; + MapReduceEntriesToDoubleTask rights, nextRight; + MapReduceEntriesToDoubleTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceEntriesToDoubleTask nextRight, + ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToDouble> transformer = + this.transformer; + final DoubleByDoubleToDouble reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final double id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceEntriesToDoubleTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + double r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v))); + result = r; + for (MapReduceEntriesToDoubleTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceEntriesToDoubleTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceEntriesToDoubleTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceEntriesToDoubleTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Double getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask + extends BulkTask { + final ObjectByObjectToDouble transformer; + final DoubleByDoubleToDouble reducer; + final double basis; + double result; + MapReduceMappingsToDoubleTask rights, nextRight; + MapReduceMappingsToDoubleTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceMappingsToDoubleTask nextRight, + ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + 
@SuppressWarnings("unchecked") public final boolean exec() { + final ObjectByObjectToDouble transformer = + this.transformer; + final DoubleByDoubleToDouble reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final double id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceMappingsToDoubleTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + double r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply((K)nextKey, (V)v)); + result = r; + for (MapReduceMappingsToDoubleTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceMappingsToDoubleTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceMappingsToDoubleTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceMappingsToDoubleTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Double getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceKeysToLongTask + extends BulkTask { + final ObjectToLong transformer; + final LongByLongToLong reducer; + final long basis; + long result; + MapReduceKeysToLongTask rights, nextRight; + MapReduceKeysToLongTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceKeysToLongTask nextRight, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToLong transformer = + this.transformer; + final LongByLongToLong reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final long id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceKeysToLongTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + long r = id; + while (advance() != null) + r = reducer.apply(r, transformer.apply((K)nextKey)); + result = r; + for (MapReduceKeysToLongTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceKeysToLongTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceKeysToLongTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceKeysToLongTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Long getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceValuesToLongTask + extends BulkTask { + final ObjectToLong transformer; + final LongByLongToLong reducer; + final long basis; + long result; + MapReduceValuesToLongTask rights, 
nextRight; + MapReduceValuesToLongTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceValuesToLongTask nextRight, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToLong transformer = + this.transformer; + final LongByLongToLong reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final long id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceValuesToLongTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + long r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply((V)v)); + result = r; + for (MapReduceValuesToLongTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceValuesToLongTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceValuesToLongTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceValuesToLongTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Long getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask + extends BulkTask { + final ObjectToLong> transformer; + final LongByLongToLong reducer; + final long basis; + long result; + MapReduceEntriesToLongTask rights, nextRight; + MapReduceEntriesToLongTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceEntriesToLongTask nextRight, + ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToLong> transformer = + this.transformer; + final LongByLongToLong reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final long id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceEntriesToLongTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + long r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v))); + result = r; + for (MapReduceEntriesToLongTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceEntriesToLongTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceEntriesToLongTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceEntriesToLongTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = 
s.nextRight) != null); + } + return false; + } + public final Long getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask + extends BulkTask { + final ObjectByObjectToLong transformer; + final LongByLongToLong reducer; + final long basis; + long result; + MapReduceMappingsToLongTask rights, nextRight; + MapReduceMappingsToLongTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceMappingsToLongTask nextRight, + ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectByObjectToLong transformer = + this.transformer; + final LongByLongToLong reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final long id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceMappingsToLongTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + long r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply((K)nextKey, (V)v)); + result = r; + for (MapReduceMappingsToLongTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceMappingsToLongTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceMappingsToLongTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceMappingsToLongTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Long getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceKeysToIntTask + extends BulkTask { + final ObjectToInt transformer; + final IntByIntToInt reducer; + final int basis; + int result; + MapReduceKeysToIntTask rights, nextRight; + MapReduceKeysToIntTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceKeysToIntTask nextRight, + ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToInt transformer = + this.transformer; + final IntByIntToInt reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final int id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceKeysToIntTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + int r = id; + while (advance() != null) + r = reducer.apply(r, transformer.apply((K)nextKey)); + result = r; + for (MapReduceKeysToIntTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceKeysToIntTask)) { + t.quietlyComplete(); + 
break; + } + t = (MapReduceKeysToIntTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceKeysToIntTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Integer getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceValuesToIntTask + extends BulkTask { + final ObjectToInt transformer; + final IntByIntToInt reducer; + final int basis; + int result; + MapReduceValuesToIntTask rights, nextRight; + MapReduceValuesToIntTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceValuesToIntTask nextRight, + ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToInt transformer = + this.transformer; + final IntByIntToInt reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final int id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceValuesToIntTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + int r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply((V)v)); + result = r; + for (MapReduceValuesToIntTask t = this, s;;) { + int c; BulkTask par; + if ((c = t.pending) == 0) { + for (s = t.rights; s != null; s = t.rights = s.nextRight) { + t.result = reducer.apply(t.result, s.result); + } + if ((par = t.parent) == null || + !(par instanceof MapReduceValuesToIntTask)) { + t.quietlyComplete(); + break; + } + t = (MapReduceValuesToIntTask)par; + } + else if (t.casPending(c, c - 1)) + break; + } + } catch (Throwable ex) { + return tryCompleteComputation(ex); + } + MapReduceValuesToIntTask s = rights; + if (s != null && !inForkJoinPool()) { + do { + if (s.tryUnfork()) + s.exec(); + } while ((s = s.nextRight) != null); + } + return false; + } + public final Integer getRawResult() { return result; } + } + + @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask + extends BulkTask { + final ObjectToInt> transformer; + final IntByIntToInt reducer; + final int basis; + int result; + MapReduceEntriesToIntTask rights, nextRight; + MapReduceEntriesToIntTask + (ConcurrentHashMapV8 m, BulkTask p, int b, + MapReduceEntriesToIntTask nextRight, + ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + super(m, p, b); this.nextRight = nextRight; + this.transformer = transformer; + this.basis = basis; this.reducer = reducer; + } + @SuppressWarnings("unchecked") public final boolean exec() { + final ObjectToInt> transformer = + this.transformer; + final IntByIntToInt reducer = this.reducer; + if (transformer == null || reducer == null) + return abortOnNullFunction(); + try { + final int id = this.basis; + for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) { + do {} while (!casPending(c = pending, c+1)); + (rights = new MapReduceEntriesToIntTask + (map, this, b >>>= 1, rights, transformer, id, reducer)).fork(); + } + int r = id; + Object v; + while ((v = advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v))); + result = r; + for (MapReduceEntriesToIntTask t = this, 
s;;) {
+                    int c; BulkTask<K,V,?> par;
+                    if ((c = t.pending) == 0) {
+                        for (s = t.rights; s != null; s = t.rights = s.nextRight) {
+                            t.result = reducer.apply(t.result, s.result);
+                        }
+                        if ((par = t.parent) == null ||
+                            !(par instanceof MapReduceEntriesToIntTask)) {
+                            t.quietlyComplete();
+                            break;
+                        }
+                        t = (MapReduceEntriesToIntTask<K,V>)par;
+                    }
+                    else if (t.casPending(c, c - 1))
+                        break;
+                }
+            } catch (Throwable ex) {
+                return tryCompleteComputation(ex);
+            }
+            MapReduceEntriesToIntTask<K,V> s = rights;
+            if (s != null && !inForkJoinPool()) {
+                do {
+                    if (s.tryUnfork())
+                        s.exec();
+                } while ((s = s.nextRight) != null);
+            }
+            return false;
+        }
+        public final Integer getRawResult() { return result; }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
+        extends BulkTask<K,V,Integer> {
+        final ObjectByObjectToInt<? super K, ? super V> transformer;
+        final IntByIntToInt reducer;
+        final int basis;
+        int result;
+        MapReduceMappingsToIntTask<K,V> rights, nextRight;
+        MapReduceMappingsToIntTask
+            (ConcurrentHashMapV8<K,V> m, BulkTask<K,V,?> p, int b,
+             MapReduceMappingsToIntTask<K,V> nextRight,
+             ObjectByObjectToInt<? super K, ? super V> transformer,
+             int basis,
+             IntByIntToInt reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        @SuppressWarnings("unchecked") public final boolean exec() {
+            final ObjectByObjectToInt<? super K, ? super V> transformer =
+                this.transformer;
+            final IntByIntToInt reducer = this.reducer;
+            if (transformer == null || reducer == null)
+                return abortOnNullFunction();
+            try {
+                final int id = this.basis;
+                for (int c, b = batch(); b > 1 && baseIndex != baseLimit;) {
+                    do {} while (!casPending(c = pending, c+1));
+                    (rights = new MapReduceMappingsToIntTask<K,V>
+                     (map, this, b >>>= 1, rights, transformer, id, reducer)).fork();
+                }
+                int r = id;
+                Object v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
+                result = r;
+                for (MapReduceMappingsToIntTask<K,V> t = this, s;;) {
+                    int c; BulkTask<K,V,?> par;
+                    if ((c = t.pending) == 0) {
+                        for (s = t.rights; s != null; s = t.rights = s.nextRight) {
+                            t.result = reducer.apply(t.result, s.result);
+                        }
+                        if ((par = t.parent) == null ||
+                            !(par instanceof MapReduceMappingsToIntTask)) {
+                            t.quietlyComplete();
+                            break;
+                        }
+                        t = (MapReduceMappingsToIntTask<K,V>)par;
+                    }
+                    else if (t.casPending(c, c - 1))
+                        break;
+                }
+            } catch (Throwable ex) {
+                return tryCompleteComputation(ex);
+            }
+            MapReduceMappingsToIntTask<K,V> s = rights;
+            if (s != null && !inForkJoinPool()) {
+                do {
+                    if (s.tryUnfork())
+                        s.exec();
+                } while ((s = s.nextRight) != null);
+            }
+            return false;
+        }
+        public final Integer getRawResult() { return result; }
+    }
+
+
     // Unsafe mechanics
     private static final sun.misc.Unsafe UNSAFE;
     private static final long counterOffset;
@@ -3348,5 +6812,4 @@ public class ConcurrentHashMapV8
             }
         }
     }
-
 }
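
The MapReduce*Task classes added above all share one divide-and-conquer shape: while the remaining batch is large, halve it and fork the right half as a new subtask (chained through rights/nextRight), reduce the left half locally with the transformer and reducer, then fold each completed subtask's partial result into its parent, using a CAS'ed pending count instead of a blocking join. The sketch below is a minimal, stand-alone analogue of that structure, not code from this patch: the class name SumLengthsTask, the THRESHOLD cutoff, and the string-length transformer are all hypothetical, and it uses the standard RecursiveTask.join() where the real tasks complete via the pending count and the rights chain.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

// Hypothetical sketch of the split/fork/reduce pattern used by the bulk tasks:
// fork the right half, reduce the left half locally, then combine the results.
class SumLengthsTask extends RecursiveTask<Integer> {
    static final int THRESHOLD = 4;   // stop splitting below this range size (cf. batch())
    final String[] keys;
    final int lo, hi;

    SumLengthsTask(String[] keys, int lo, int hi) {
        this.keys = keys; this.lo = lo; this.hi = hi;
    }

    @Override protected Integer compute() {
        if (hi - lo <= THRESHOLD) {
            int r = 0;                              // basis (identity) of the int reduction
            for (int i = lo; i < hi; ++i)
                r = r + keys[i].length();           // transformer and reducer, fused
            return r;
        }
        int mid = (lo + hi) >>> 1;
        SumLengthsTask right = new SumLengthsTask(keys, mid, hi);
        right.fork();                               // like (rights = new ...Task(...)).fork()
        int left = new SumLengthsTask(keys, lo, mid).compute();
        return left + right.join();                 // like reducer.apply(t.result, s.result)
    }

    public static void main(String[] args) {
        String[] keys = { "alpha", "beta", "gamma", "delta", "epsilon", "zeta" };
        int total = new ForkJoinPool().invoke(new SumLengthsTask(keys, 0, keys.length));
        System.out.println("total key length = " + total);   // prints 30
    }
}

Halving the batch on each split, as the patch does with b >>>= 1, keeps the task tree roughly balanced, so idle ForkJoinPool workers can steal the forked right-hand subtasks while the forking thread keeps reducing its own left half.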