--- jsr166/src/jsr166e/ConcurrentHashMapV8.java 2012/07/04 20:21:02 1.43 +++ jsr166/src/jsr166e/ConcurrentHashMapV8.java 2013/02/15 22:20:46 1.99 @@ -5,7 +5,8 @@ */ package jsr166e; -import jsr166e.LongAdder; + +import java.util.Comparator; import java.util.Arrays; import java.util.Map; import java.util.Set; @@ -20,9 +21,9 @@ import java.util.Enumeration; import java.util.ConcurrentModificationException; import java.util.NoSuchElementException; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.locks.LockSupport; import java.util.concurrent.locks.AbstractQueuedSynchronizer; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.io.Serializable; /** @@ -37,25 +38,28 @@ import java.io.Serializable; * interoperable with {@code Hashtable} in programs that rely on its * thread safety but not on its synchronization details. * - *

Retrieval operations (including {@code get}) generally do not + *

Retrieval operations (including {@code get}) generally do not * block, so may overlap with update operations (including {@code put} * and {@code remove}). Retrievals reflect the results of the most * recently completed update operations holding upon their - * onset. For aggregate operations such as {@code putAll} and {@code - * clear}, concurrent retrievals may reflect insertion or removal of - * only some entries. Similarly, Iterators and Enumerations return - * elements reflecting the state of the hash table at some point at or - * since the creation of the iterator/enumeration. They do - * not throw {@link ConcurrentModificationException}. - * However, iterators are designed to be used by only one thread at a - * time. Bear in mind that the results of aggregate status methods - * including {@code size}, {@code isEmpty}, and {@code containsValue} - * are typically useful only when a map is not undergoing concurrent - * updates in other threads. Otherwise the results of these methods - * reflect transient states that may be adequate for monitoring - * or estimation purposes, but not for program control. + * onset. (More formally, an update operation for a given key bears a + * happens-before relation with any (non-null) retrieval for + * that key reporting the updated value.) For aggregate operations + * such as {@code putAll} and {@code clear}, concurrent retrievals may + * reflect insertion or removal of only some entries. Similarly, + * Iterators and Enumerations return elements reflecting the state of + * the hash table at some point at or since the creation of the + * iterator/enumeration. They do not throw {@link + * ConcurrentModificationException}. However, iterators are designed + * to be used by only one thread at a time. Bear in mind that the + * results of aggregate status methods including {@code size}, {@code + * isEmpty}, and {@code containsValue} are typically useful only when + * a map is not undergoing concurrent updates in other threads. + * Otherwise the results of these methods reflect transient states + * that may be adequate for monitoring or estimation purposes, but not + * for program control. * - *

The table is dynamically expanded when there are too many + *

The table is dynamically expanded when there are too many * collisions (i.e., keys that have distinct hash codes but fall into * the same slot modulo the table size), with the expected average * effect of maintaining roughly two bins per mapping (corresponding @@ -76,68 +80,150 @@ import java.io.Serializable; * {@code hashCode()} is a sure way to slow down performance of any * hash table. * + *

+ * <p>A {@link Set} projection of a ConcurrentHashMapV8 may be created
+ * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
+ * (using {@link #keySet(Object)}) when only keys are of interest, and the
+ * mapped values are (perhaps transiently) not used or all take the
+ * same mapping value.
+ *

+ * <p>A ConcurrentHashMapV8 can be used as a scalable frequency map (a
+ * form of histogram or multiset) by using {@link LongAdder} values
+ * and initializing via {@link #computeIfAbsent}. For example, to add
+ * a count to a {@code ConcurrentHashMapV8<String,LongAdder> freqs},
+ * you can use {@code freqs.computeIfAbsent(k, k -> new
+ * LongAdder()).increment();}
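+ *
+ * <p>A sketch of the same frequency count in pre-lambda form,
+ * assuming the nested {@code Fun} function interface mentioned in
+ * the <em>jsr166e note</em> below:
+ *
+ * <pre> {@code
+ * static void add(ConcurrentHashMapV8<String,LongAdder> freqs, String word) {
+ *   freqs.computeIfAbsent(word, new ConcurrentHashMapV8.Fun<String,LongAdder>() {
+ *     public LongAdder apply(String k) { return new LongAdder(); }
+ *   }).increment();
+ * }}</pre>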

This class and its views and iterators implement all of the * optional methods of the {@link Map} and {@link Iterator} * interfaces. * - *

Like {@link Hashtable} but unlike {@link HashMap}, this class + *

Like {@link Hashtable} but unlike {@link HashMap}, this class * does not allow {@code null} to be used as a key or value. * + *

+ * <p>ConcurrentHashMapV8s support sequential and parallel bulk
+ * operations. (Parallel forms use the {@link
+ * ForkJoinPool#commonPool()}). Tasks that may be used in other
+ * contexts are available in class {@link ForkJoinTasks}. These
+ * operations are designed to be safely, and often sensibly, applied
+ * even with maps that are being concurrently updated by other
+ * threads; for example, when computing a snapshot summary of the
+ * values in a shared registry. There are three kinds of operation,
+ * each with four forms, accepting functions with Keys, Values,
+ * Entries, and (Key, Value) arguments and/or return values, as
+ * detailed in the list below. Because the elements of a
+ * ConcurrentHashMapV8 are not ordered in any particular way, and may
+ * be processed in different orders in different parallel executions,
+ * the correctness of supplied functions should not depend on any
+ * ordering, or on any other objects or values that may transiently
+ * change while computation is in progress; and except for forEach
+ * actions, should ideally be side-effect-free.
+ *

+ * <ul>
+ * <li> forEach: Perform a given action on each element.
+ * A variant form applies a given transformation on each element
+ * before performing the action.</li>
+ *
+ * <li> search: Return the first available non-null result of
+ * applying a given function on each element; skipping further
+ * search when a result is found.</li>
+ *
+ * <li> reduce: Accumulate each element.  The supplied reduction
+ * function cannot rely on ordering (more formally, it should be
+ * both associative and commutative).</li>
+ * </ul>

The concurrency properties of bulk operations follow + * from those of ConcurrentHashMapV8: Any non-null result returned + * from {@code get(key)} and related access methods bears a + * happens-before relation with the associated insertion or + * update. The result of any bulk operation reflects the + * composition of these per-element relations (but is not + * necessarily atomic with respect to the map as a whole unless it + * is somehow known to be quiescent). Conversely, because keys + * and values in the map are never null, null serves as a reliable + * atomic indicator of the current lack of any result. To + * maintain this property, null serves as an implicit basis for + * all non-scalar reduction operations. For the double, long, and + * int versions, the basis should be one that, when combined with + * any other value, returns that other value (more formally, it + * should be the identity element for the reduction). Most common + * reductions have these properties; for example, computing a sum + * with basis 0 or a minimum with basis MAX_VALUE. + * + *
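+ *
+ * <p>For example (an illustrative sketch), a reducer combining
+ * per-element results under this rule treats null as "no result
+ * yet", so the basis never distorts a non-empty reduction:
+ *
+ * <pre> {@code
+ * static Long combineMin(Long r1, Long r2) { // null means no result
+ *   return (r1 == null) ? r2 : (r2 == null) ? r1 : Math.min(r1, r2);
+ * }}</pre>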

Search and transformation functions provided as arguments + * should similarly return null to indicate the lack of any result + * (in which case it is not used). In the case of mapped + * reductions, this also enables transformations to serve as + * filters, returning null (or, in the case of primitive + * specializations, the identity basis) if the element should not + * be combined. You can create compound transformations and + * filterings by composing them yourself under this "null means + * there is nothing there now" rule before using them in search or + * reduce operations. + * + *

Methods accepting and/or returning Entry arguments maintain + * key-value associations. They may be useful for example when + * finding the key for the greatest value. Note that "plain" Entry + * arguments can be supplied using {@code new + * AbstractMap.SimpleEntry(k,v)}. + * + *
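+ *
+ * <p>For instance, given a {@code Map<String,Long> m}, a sequential
+ * sketch of finding the key for the greatest value:
+ *
+ * <pre> {@code
+ * Map.Entry<String,Long> best = null;
+ * for (Map.Entry<String,Long> e : m.entrySet()) {
+ *   if (best == null || e.getValue() > best.getValue())
+ *     best = new AbstractMap.SimpleEntry<String,Long>(e.getKey(), e.getValue());
+ * }}</pre>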

Bulk operations may complete abruptly, throwing an + * exception encountered in the application of a supplied + * function. Bear in mind when handling such exceptions that other + * concurrently executing functions could also have thrown + * exceptions, or would have done so if the first exception had + * not occurred. + * + *

Speedups for parallel compared to sequential forms are common + * but not guaranteed. Parallel operations involving brief functions + * on small maps may execute more slowly than sequential forms if the + * underlying work to parallelize the computation is more expensive + * than the computation itself. Similarly, parallelization may not + * lead to much actual parallelism if all processors are busy + * performing unrelated tasks. + * + *

All arguments to all task methods must be non-null. + * + *

jsr166e note: During transition, this class + * uses nested functional interfaces with different names but the + * same forms as those expected for JDK8. + * *

This class is a member of the
 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
 * Java Collections Framework</a>.
 *

- * <p><em>jsr166e note: This class is a candidate replacement for
- * java.util.concurrent.ConcurrentHashMap.</em>
- *
  * @since 1.5
  * @author Doug Lea
  * @param <K> the type of keys maintained by this map
  * @param <V> the type of mapped values
  */
-public class ConcurrentHashMapV8<K, V>
-        implements ConcurrentMap<K, V>, Serializable {
+public class ConcurrentHashMapV8<K,V>
+    implements ConcurrentMap<K,V>, Serializable {
     private static final long serialVersionUID = 7249069246763182397L;
 
     /**
-     * A function computing a mapping from the given key to a value.
-     * This is a place-holder for an upcoming JDK8 interface.
-     */
-    public static interface MappingFunction<K, V> {
-        /**
-         * Returns a value for the given key, or null if there is no mapping.
-         *
-         * @param key the (non-null) key
-         * @return a value for the key, or null if none
-         */
-        V map(K key);
-    }
-
-    /**
-     * A function computing a new mapping given a key and its current
-     * mapped value (or {@code null} if there is no current
-     * mapping). This is a place-holder for an upcoming JDK8
-     * interface.
-     */
-    public static interface RemappingFunction<K, V> {
-        /**
-         * Returns a new value given a key and its current value.
-         *
-         * @param key the (non-null) key
-         * @param value the current value, or null if there is no mapping
-         * @return a value for the key, or null if none
-         */
-        V remap(K key, V value);
-    }
-
-    /**
      * A partitionable iterator. A Spliterator can be traversed
      * directly, but can also be partitioned (before traversal) by
      * creating another Spliterator that covers a non-overlapping
      * portion of the elements, and so may be amenable to parallel
      * execution.
      *
-     * <p>This interface exports a subset of expected JDK8
+     * <p>This interface exports a subset of expected JDK8
      * functionality.
      *
      * <p>Sample usage: Here is one (of the several) ways to compute
      * the sum of the values held in a map using the ForkJoin
      * framework. As illustrated here, Spliterators are well suited to
      * designs in which a task repeatedly splits off half its work
      * into forked subtasks until small enough to process directly,
-     * and then joins these subtasks. Variants of this style can be
-     * also be used in completion-based designs.
+     * and then joins these subtasks. Variants of this style can also
+     * be used in completion-based designs.
      *
      * <pre> {@code ConcurrentHashMapV8<String,Long> m = ...
-     * // Uses parallel depth of log2 of size / (parallelism * slack of 8).
-     * int depth = 32 - Integer.numberOfLeadingZeros(m.size() / (aForkJoinPool.getParallelism() * 8));
-     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), depth, null));
+     * // split as if have 8 * parallelism, for load balance
+     * int n = m.size();
+     * int p = aForkJoinPool.getParallelism() * 8;
+     * int split = (n < p)? n : p;
+     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), split, null));
      * // ...
     * static class SumValues extends RecursiveTask<Long> {
     *   final Spliterator<Long> s;
-     *   final int depth;             // number of splits before processing
+     *   final int split;             // split while > 1
      *   final SumValues nextJoin;    // records forked subtasks to join
-     *   SumValues(Spliterator s, int depth, SumValues nextJoin) {
-     *     this.s = s; this.depth = depth; this.nextJoin = nextJoin;
+     *   SumValues(Spliterator<Long> s, int split, SumValues nextJoin) {
+     *     this.s = s; this.split = split; this.nextJoin = nextJoin;
@@ -164,8 +252,8 @@ public class ConcurrentHashMapV8
      *   public Long compute() {
      *     long sum = 0;
      *     SumValues subtasks = null; // fork subtasks
-     *     for (int d = depth - 1; d >= 0; --d)
-     *       (subtasks = new SumValues(s.split(), d, subtasks)).fork();
+     *     for (int sp = split >>> 1; sp > 0; sp >>>= 1)
+     *       (subtasks = new SumValues(s.split(), sp, subtasks)).fork();
      *     while (s.hasNext())        // directly process remaining elements
      *       sum += s.next();
      *     for (SumValues t = subtasks; t != null; t = t.nextJoin)
@@ -192,21 +280,9 @@ public class ConcurrentHashMapV8
          * @return a Spliterator covering approximately half of the
          * elements
          * @throws IllegalStateException if this Spliterator has
-         * already commenced traversing elements.
+         * already commenced traversing elements
          */
        Spliterator<T> split();
-
-        /**
-         * Returns a Spliterator producing the same elements as this
-         * Spliterator. This method may be used for example to create
-         * a second Spliterator before a traversal, in order to later
-         * perform a second traversal.
-         *
-         * @return a Spliterator covering the same range as this Spliterator.
-         * @throws IllegalStateException if this Spliterator has
-         * already commenced traversing elements.
-         */
-        Spliterator<T> clone();
     }
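
    // A sketch of purely sequential use, relying only on the direct
    // hasNext()/next() traversal shown in the sample above:
    //
    //   static long sumSequentially(ConcurrentHashMapV8.Spliterator<Long> s) {
    //       long sum = 0;
    //       while (s.hasNext())
    //           sum += s.next();
    //       return sum;
    //   }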
 
     /*
@@ -219,17 +295,15 @@ public class ConcurrentHashMapV8
      * the same or better than java.util.HashMap, and to support high
      * initial insertion rates on an empty table by many threads.
      *
-     * Each key-value mapping is held in a Node.  Because Node fields
-     * can contain special values, they are defined using plain Object
-     * types. Similarly in turn, all internal methods that use them
-     * work off Object types. And similarly, so do the internal
-     * methods of auxiliary iterator and view classes.  All public
-     * generic typed methods relay in/out of these internal methods,
-     * supplying null-checks and casts as needed. This also allows
-     * many of the public methods to be factored into a smaller number
-     * of internal methods (although sadly not so for the five
-     * variants of put-related operations). The validation-based
-     * approach explained below leads to a lot of code sprawl because
+     * Each key-value mapping is held in a Node.  Because Node key
+     * fields can contain special values, they are defined using plain
+     * Object types (not type "K"). This leads to a lot of explicit
+     * casting (and many explicit warning suppressions to tell
+     * compilers not to complain about it). It also allows some of the
+     * public methods to be factored into a smaller number of internal
+     * methods (although sadly not so for the five variants of
+     * put-related operations). The validation-based approach
+     * explained below leads to a lot of code sprawl because
      * retry-control precludes factoring into smaller methods.
      *
      * The table is lazily initialized to a power-of-two size upon the
@@ -243,19 +317,12 @@ public class ConcurrentHashMapV8
      * as lookups check hash code and non-nullness of value before
      * checking key equality.
      *
-     * We use the top two bits of Node hash fields for control
-     * purposes -- they are available anyway because of addressing
-     * constraints.  As explained further below, these top bits are
-     * used as follows:
-     *  00 - Normal
-     *  01 - Locked
-     *  11 - Locked and may have a thread waiting for lock
-     *  10 - Node is a forwarding node
-     *
-     * The lower 30 bits of each Node's hash field contain a
-     * transformation of the key's hash code, except for forwarding
-     * nodes, for which the lower bits are zero (and so always have
-     * hash field == MOVED).
+     * We use the top (sign) bit of Node hash fields for control
+     * purposes -- it is available anyway because of addressing
+     * constraints.  Nodes with negative hash fields are forwarding
+     * nodes to either TreeBins or resized tables.  The lower 31 bits
+     * of each normal Node's hash field contain a transformation of
+     * the key's hash code.
      *
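      * A sketch of the resulting check (with hypothetical helper
      * names), as the lookup and update loops below use it:
      *
      *   int eh = e.hash;
      *   if (eh < 0)        // forwarding: e.key is a TreeBin or next table
      *       relayToForwardee(e);
      *   else if (eh == h)  // normal: eh == spread(key.hashCode())
      *       checkKeyEquality(e);
      *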
      * Insertion (via put or its variants) of the first node in an
      * empty bin is performed by just CASing it to the bin.  This is
@@ -264,12 +331,8 @@ public class ConcurrentHashMapV8
      * delete, and replace) require locks.  We do not want to waste
      * the space required to associate a distinct lock object with
      * each bin, so instead use the first node of a bin list itself as
-     * a lock. Blocking support for these locks relies on the builtin
-     * "synchronized" monitors.  However, we also need a tryLock
-     * construction, so we overlay these by using bits of the Node
-     * hash field for lock control (see above), and so normally use
-     * builtin monitors only for blocking and signalling using
-     * wait/notifyAll constructions. See Node.tryAwaitLock.
+     * a lock. Locking support for these locks relies on builtin
+     * "synchronized" monitors.
      *
      * Using the first node of a list as a lock does not by itself
      * suffice though: When a node is locked, any update must first
@@ -331,46 +394,46 @@ public class ConcurrentHashMapV8
      * iterators in the same way.
      *
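      * A sketch of the resulting lock-then-validate idiom, as the
      * update methods below apply it:
      *
      *   Node<V> f = tabAt(tab, i);      // bin head
      *   synchronized (f) {
      *       if (tabAt(tab, i) == f) {   // revalidate: still the head?
      *           // scan and update the bin's list
      *       }                           // else retry the outer loop
      *   }
      *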
      * The table is resized when occupancy exceeds a percentage
-     * threshold (nominally, 0.75, but see below).  Only a single
-     * thread performs the resize (using field "sizeCtl", to arrange
-     * exclusion), but the table otherwise remains usable for reads
-     * and updates. Resizing proceeds by transferring bins, one by
-     * one, from the table to the next table.  Because we are using
-     * power-of-two expansion, the elements from each bin must either
-     * stay at same index, or move with a power of two offset. We
-     * eliminate unnecessary node creation by catching cases where old
-     * nodes can be reused because their next fields won't change.  On
-     * average, only about one-sixth of them need cloning when a table
-     * doubles. The nodes they replace will be garbage collectable as
-     * soon as they are no longer referenced by any reader thread that
-     * may be in the midst of concurrently traversing table.  Upon
-     * transfer, the old table bin contains only a special forwarding
-     * node (with hash field "MOVED") that contains the next table as
-     * its key. On encountering a forwarding node, access and update
-     * operations restart, using the new table.
-     *
-     * Each bin transfer requires its bin lock. However, unlike other
-     * cases, a transfer can skip a bin if it fails to acquire its
-     * lock, and revisit it later (unless it is a TreeBin). Method
-     * rebuild maintains a buffer of TRANSFER_BUFFER_SIZE bins that
-     * have been skipped because of failure to acquire a lock, and
-     * blocks only if none are available (i.e., only very rarely).
-     * The transfer operation must also ensure that all accessible
-     * bins in both the old and new table are usable by any traversal.
-     * When there are no lock acquisition failures, this is arranged
-     * simply by proceeding from the last bin (table.length - 1) up
-     * towards the first.  Upon seeing a forwarding node, traversals
-     * (see class InternalIterator) arrange to move to the new table
-     * without revisiting nodes.  However, when any node is skipped
-     * during a transfer, all earlier table bins may have become
-     * visible, so are initialized with a reverse-forwarding node back
-     * to the old table until the new ones are established. (This
-     * sometimes requires transiently locking a forwarding node, which
-     * is possible under the above encoding.) These more expensive
-     * mechanics trigger only when necessary.
+     * threshold (nominally, 0.75, but see below).  Any thread
+     * noticing an overfull bin may assist in resizing after the
+     * initiating thread allocates and sets up the replacement
+     * array. However, rather than stalling, these other threads may
+     * proceed with insertions etc.  The use of TreeBins shields us
+     * from the worst case effects of overfilling while resizes are in
+     * progress.  Resizing proceeds by transferring bins, one by one,
+     * from the table to the next table. To enable concurrency, the
+     * next table must be (incrementally) prefilled with place-holders
+     * serving as reverse forwarders to the old table.  Because we are
+     * using power-of-two expansion, the elements from each bin must
+     * either stay at same index, or move with a power of two
+     * offset. We eliminate unnecessary node creation by catching
+     * cases where old nodes can be reused because their next fields
+     * won't change.  On average, only about one-sixth of them need
+     * cloning when a table doubles. The nodes they replace will be
+     * garbage collectable as soon as they are no longer referenced by
+     * any reader thread that may be in the midst of concurrently
+     * traversing table.  Upon transfer, the old table bin contains
+     * only a special forwarding node (with hash field "MOVED") that
+     * contains the next table as its key. On encountering a
+     * forwarding node, access and update operations restart, using
+     * the new table.
+     *
+     * Each bin transfer requires its bin lock, which can stall
+     * waiting for locks while resizing. However, because other
+     * threads can join in and help resize rather than contend for
+     * locks, average aggregate waits become shorter as resizing
+     * progresses.  The transfer operation must also ensure that all
+     * accessible bins in both the old and new table are usable by any
+     * traversal.  This is arranged by proceeding from the last bin
+     * (table.length - 1) up towards the first.  Upon seeing a
+     * forwarding node, traversals (see class Traverser) arrange to
+     * move to the new table without revisiting nodes.  However, to
+     * ensure that no intervening nodes are skipped, bin splitting can
+     * only begin after the associated reverse-forwarders are in
+     * place.
      *
      * The traversal scheme also applies to partial traversals of
-     * ranges of bins (via an alternate InternalIterator constructor)
+     * ranges of bins (via an alternate Traverser constructor)
      * to support partitioned aggregate operations.  Also, read-only
      * operations give up if ever forwarded to a null table, which
      * provides support for shutdown-style clearing, which is also not
@@ -382,20 +445,20 @@ public class ConcurrentHashMapV8
      * These cases attempt to override the initial capacity settings,
      * but harmlessly fail to take effect in cases of races.
      *
-     * The element count is maintained using a LongAdder, which avoids
-     * contention on updates but can encounter cache thrashing if read
-     * too frequently during concurrent access. To avoid reading so
-     * often, resizing is attempted either when a bin lock is
-     * contended, or upon adding to a bin already holding two or more
-     * nodes (checked before adding in the xIfAbsent methods, after
-     * adding in others). Under uniform hash distributions, the
-     * probability of this occurring at threshold is around 13%,
-     * meaning that only about 1 in 8 puts check threshold (and after
-     * resizing, many fewer do so). But this approximation has high
-     * variance for small table sizes, so we check on any collision
-     * for sizes <= 64. The bulk putAll operation further reduces
-     * contention by only committing count updates upon these size
-     * checks.
+     * The element count is maintained using a specialization of
+     * LongAdder. We need to incorporate a specialization rather than
+     * just use a LongAdder in order to access implicit
+     * contention-sensing that leads to creation of multiple
+     * CounterCells.  The counter mechanics avoid contention on
+     * updates but can encounter cache thrashing if read too
+     * frequently during concurrent access. To avoid reading so often,
+     * resizing under contention is attempted only upon adding to a
+     * bin already holding two or more nodes. Under uniform hash
+     * distributions, the probability of this occurring at threshold
+     * is around 13%, meaning that only about 1 in 8 puts check
+     * threshold (and after resizing, many fewer do so). The bulk
+     * putAll operation further reduces contention by only committing
+     * count updates upon these size checks.
      *
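      * A sketch of the resulting read side (in the style of
      * LongAdder.sum), using the baseCount and counterCells fields
      * declared below:
      *
      *   long sum = baseCount;
      *   CounterCell[] as = counterCells;
      *   if (as != null)
      *       for (CounterCell a : as)
      *           if (a != null)
      *               sum += a.value;
      *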
      * Maintaining API and serialization compatibility with previous
      * versions of this class introduces several oddities. Mainly: We
@@ -446,27 +509,68 @@ public class ConcurrentHashMapV8
     private static final float LOAD_FACTOR = 0.75f;
 
     /**
-     * The buffer size for skipped bins during transfers. The
-     * value is arbitrary but should be large enough to avoid
-     * most locking stalls during resizes.
-     */
-    private static final int TRANSFER_BUFFER_SIZE = 32;
-
-    /**
      * The bin count threshold for using a tree rather than list for a
      * bin.  The value reflects the approximate break-even point for
      * using tree-based operations.
      */
     private static final int TREE_THRESHOLD = 8;
 
+    /**
+     * Minimum number of rebinnings per transfer step. Ranges are
+     * subdivided to allow multiple resizer threads.  This value
+     * serves as a lower bound to avoid resizers encountering
+     * excessive memory contention.  The value should be at least
+     * DEFAULT_CAPACITY.
+     */
+    private static final int MIN_TRANSFER_STRIDE = 16;
+
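+    // Illustrative sketch of how a per-thread transfer stride might be
+    // derived (the transfer method itself is outside this hunk),
+    // assuming n is the current table length:
+    //
+    //   int stride = (NCPU > 1) ? (n >>> 3) / NCPU : n;
+    //   if (stride < MIN_TRANSFER_STRIDE)
+    //       stride = MIN_TRANSFER_STRIDE;
+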
     /*
-     * Encodings for special uses of Node hash fields. See above for
-     * explanation.
+     * Encodings for Node hash fields. See above for explanation.
      */
     static final int MOVED     = 0x80000000; // hash field for forwarding nodes
-    static final int LOCKED    = 0x40000000; // set/tested only as a bit
-    static final int WAITING   = 0xc0000000; // both bits set/tested together
-    static final int HASH_BITS = 0x3fffffff; // usable bits of normal node hash
+    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
+
+    /** Number of CPUS, to place bounds on some sizings */
+    static final int NCPU = Runtime.getRuntime().availableProcessors();
+
+    /* ---------------- Counters -------------- */
+
+    // Adapted from LongAdder and Striped64.
+    // See their internal docs for explanation.
+
+    // A padded cell for distributing counts
+    static final class CounterCell {
+        volatile long p0, p1, p2, p3, p4, p5, p6;
+        volatile long value;
+        volatile long q0, q1, q2, q3, q4, q5, q6;
+        CounterCell(long x) { value = x; }
+    }
+
+    /**
+     * Holder for the thread-local hash code determining which
+     * CounterCell to use. The code is initialized via the
+     * counterHashCodeGenerator, but may be moved upon collisions.
+     */
+    static final class CounterHashCode {
+        int code;
+    }
+
+    /**
+     * Generates initial value for per-thread CounterHashCodes
+     */
+    static final AtomicInteger counterHashCodeGenerator = new AtomicInteger();
+
+    /**
+     * Increment for counterHashCodeGenerator. See class ThreadLocal
+     * for explanation.
+     */
+    static final int SEED_INCREMENT = 0x61c88647;
+
+    /**
+     * Per-thread counter hash codes. Shared across all instances.
+     */
+    static final ThreadLocal<CounterHashCode> threadCounterHashCode =
+        new ThreadLocal<CounterHashCode>();
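+
+    // Sketch of lazy per-thread initialization using the generator and
+    // increment above (the counter slow path is outside this hunk):
+    //
+    //   CounterHashCode hc = threadCounterHashCode.get();
+    //   if (hc == null) {
+    //       hc = new CounterHashCode();
+    //       hc.code = counterHashCodeGenerator.addAndGet(SEED_INCREMENT);
+    //       threadCounterHashCode.set(hc);
+    //   }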
 
     /* ---------------- Fields -------------- */
 
@@ -474,26 +578,54 @@ public class ConcurrentHashMapV8
      * The array of bins. Lazily initialized upon first insertion.
      * Size is always a power of two. Accessed directly by iterators.
      */
-    transient volatile Node[] table;
+    transient volatile Node<V>[] table;
+
+    /**
+     * The next table to use; non-null only while resizing.
+     */
+    private transient volatile Node<V>[] nextTable;
 
     /**
-     * The counter maintaining number of elements.
+     * Base counter value, used mainly when there is no contention,
+     * but also as a fallback during table initialization
+     * races. Updated via CAS.
      */
-    private transient final LongAdder counter;
+    private transient volatile long baseCount;
 
     /**
      * Table initialization and resizing control.  When negative, the
-     * table is being initialized or resized. Otherwise, when table is
-     * null, holds the initial table size to use upon creation, or 0
-     * for default. After initialization, holds the next element count
-     * value upon which to resize the table.
+     * table is being initialized or resized: -1 for initialization,
+     * else -(1 + the number of active resizing threads).  Otherwise,
+     * when table is null, holds the initial table size to use upon
+     * creation, or 0 for default. After initialization, holds the
+     * next element count value upon which to resize the table.
      */
     private transient volatile int sizeCtl;
 
+    /**
+     * The next table index (plus one) to split while resizing.
+     */
+    private transient volatile int transferIndex;
+
+    /**
+     * The least available table index to split while resizing.
+     */
+    private transient volatile int transferOrigin;
+
+    /**
+     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
+     */
+    private transient volatile int counterBusy;
+
+    /**
+     * Table of counter cells. When non-null, size is a power of 2.
+     */
+    private transient volatile CounterCell[] counterCells;
+
     // views
-    private transient KeySet<K,V> keySet;
-    private transient Values<K,V> values;
-    private transient EntrySet<K,V> entrySet;
+    private transient KeySetView<K,V> keySet;
+    private transient ValuesView<K,V> values;
+    private transient EntrySetView<K,V> entrySet;
 
     /** For serialization compatibility. Null unless serialized; see below */
    private Segment<K,V>[] segments;
@@ -512,16 +644,19 @@ public class ConcurrentHashMapV8
      * inline assignments below.
      */
 
-    static final Node tabAt(Node[] tab, int i) { // used by InternalIterator
-        return (Node)UNSAFE.getObjectVolatile(tab, ((long)i<<ASHIFT)+ABASE);
+    @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
+        (Node<V>[] tab, int i) { // used by Traverser
+        return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
     }
 
-    private static final boolean casTabAt(Node[] tab, int i, Node c, Node v) {
-        return UNSAFE.compareAndSwapObject(tab, ((long)i<<ASHIFT)+ABASE, c, v);
+    static final <V> boolean casTabAt
+        (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
+        return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
     }
 
-    private static final void setTabAt(Node[] tab, int i, Node v) {
-        UNSAFE.putObjectVolatile(tab, ((long)i<<ASHIFT)+ABASE, v);
+    static final <V> void setTabAt
+        (Node<V>[] tab, int i, Node<V> v) {
+        U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
     }
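
    // Sketch of where ABASE and ASHIFT come from (the actual static
    // initializer is outside this hunk):
    //
    //   Class<?> ac = Node[].class;
    //   long base  = U.arrayBaseOffset(ac);                    // ABASE
    //   int  scale = U.arrayIndexScale(ac);                    // bytes per slot
    //   int  shift = 31 - Integer.numberOfLeadingZeros(scale); // ASHIFT
    //   // element i is read and written at base + ((long)i << shift)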
 
     /* ---------------- Nodes -------------- */
@@ -536,84 +671,18 @@ public class ConcurrentHashMapV8
      * before a val, but can only be used after checking val to be
      * non-null.
      */
-    static class Node {
-        volatile int hash;
+    static class Node<V> {
+        final int hash;
         final Object key;
-        volatile Object val;
-        volatile Node next;
+        volatile V val;
+        volatile Node<V> next;
 
-        Node(int hash, Object key, Object val, Node next) {
+        Node(int hash, Object key, V val, Node<V> next) {
             this.hash = hash;
             this.key = key;
             this.val = val;
             this.next = next;
         }
-
-        /** CompareAndSet the hash field */
-        final boolean casHash(int cmp, int val) {
-            return UNSAFE.compareAndSwapInt(this, hashOffset, cmp, val);
-        }
-
-        /** The number of spins before blocking for a lock */
-        static final int MAX_SPINS =
-            Runtime.getRuntime().availableProcessors() > 1 ? 64 : 1;
-
-        /**
-         * Spins a while if LOCKED bit set and this node is the first
-         * of its bin, and then sets WAITING bits on hash field and
-         * blocks (once) if they are still set.  It is OK for this
-         * method to return even if lock is not available upon exit,
-         * which enables these simple single-wait mechanics.
-         *
-         * The corresponding signalling operation is performed within
-         * callers: Upon detecting that WAITING has been set when
-         * unlocking lock (via a failed CAS from non-waiting LOCKED
-         * state), unlockers acquire the sync lock and perform a
-         * notifyAll.
-         */
-        final void tryAwaitLock(Node[] tab, int i) {
-            if (tab != null && i >= 0 && i < tab.length) { // bounds check
-                int r = ThreadLocalRandom.current().nextInt(); // randomize spins
-                int spins = MAX_SPINS, h;
-                while (tabAt(tab, i) == this && ((h = hash) & LOCKED) != 0) {
-                    if (spins >= 0) {
-                        r ^= r << 1; r ^= r >>> 3; r ^= r << 10; // xorshift
-                        if (r >= 0 && --spins == 0)
-                            Thread.yield();  // yield before block
-                    }
-                    else if (casHash(h, h | WAITING)) {
-                        synchronized (this) {
-                            if (tabAt(tab, i) == this &&
-                                (hash & WAITING) == WAITING) {
-                                try {
-                                    wait();
-                                } catch (InterruptedException ie) {
-                                    Thread.currentThread().interrupt();
-                                }
-                            }
-                            else
-                                notifyAll(); // possibly won race vs signaller
-                        }
-                        break;
-                    }
-                }
-            }
-        }
-
-        // Unsafe mechanics for casHash
-        private static final sun.misc.Unsafe UNSAFE;
-        private static final long hashOffset;
-
-        static {
-            try {
-                UNSAFE = getUnsafe();
-                Class k = Node.class;
-                hashOffset = UNSAFE.objectFieldOffset
-                    (k.getDeclaredField("hash"));
-            } catch (Exception e) {
-                throw new Error(e);
-            }
-        }
     }
 
     /* ---------------- TreeBins -------------- */
@@ -621,14 +690,14 @@ public class ConcurrentHashMapV8
     /**
      * Nodes for use in TreeBins
      */
-    static final class TreeNode extends Node {
-        TreeNode parent;  // red-black tree links
-        TreeNode left;
-        TreeNode right;
-        TreeNode prev;    // needed to unlink next upon deletion
+    static final class TreeNode<V> extends Node<V> {
+        TreeNode<V> parent;  // red-black tree links
+        TreeNode<V> left;
+        TreeNode<V> right;
+        TreeNode<V> prev;    // needed to unlink next upon deletion
         boolean red;
 
-        TreeNode(int hash, Object key, Object val, Node next, TreeNode parent) {
+        TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
             super(hash, key, val, next);
             this.parent = parent;
         }
@@ -660,7 +729,7 @@ public class ConcurrentHashMapV8
      * TreeBins also maintain a separate locking discipline than
      * regular bins. Because they are forwarded via special MOVED
      * nodes at bin heads (which can never change once established),
-     * we cannot use use those nodes as locks. Instead, TreeBin
+     * we cannot use those nodes as locks. Instead, TreeBin
      * extends AbstractQueuedSynchronizer to support a simple form of
      * read-write lock. For update operations and table validation,
      * the exclusive form of lock behaves in the same way as bin-head
@@ -677,10 +746,10 @@ public class ConcurrentHashMapV8
      * and writers. Since we don't need to export full Lock API, we
      * just override the minimal AQS methods and use them directly.
      */
-    static final class TreeBin extends AbstractQueuedSynchronizer {
+    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
         private static final long serialVersionUID = 2249069246763182397L;
-        transient TreeNode root;  // root of tree
-        transient TreeNode first; // head of next-pointer list
+        transient TreeNode<V> root;  // root of tree
+        transient TreeNode<V> first; // head of next-pointer list
 
         /* AQS overrides */
         public final boolean isHeldExclusively() { return getState() > 0; }
@@ -711,9 +780,9 @@ public class ConcurrentHashMapV8
         }
 
         /** From CLR */
-        private void rotateLeft(TreeNode p) {
+        private void rotateLeft(TreeNode<V> p) {
             if (p != null) {
-                TreeNode r = p.right, pp, rl;
+                TreeNode<V> r = p.right, pp, rl;
                 if ((rl = p.right = r.left) != null)
                     rl.parent = p;
                 if ((pp = r.parent = p.parent) == null)
@@ -728,9 +797,9 @@ public class ConcurrentHashMapV8
         }
 
         /** From CLR */
-        private void rotateRight(TreeNode p) {
+        private void rotateRight(TreeNode<V> p) {
             if (p != null) {
-                TreeNode l = p.left, pp, lr;
+                TreeNode<V> l = p.left, pp, lr;
                 if ((lr = p.left = l.right) != null)
                     lr.parent = p;
                 if ((pp = l.parent = p.parent) == null)
@@ -745,11 +814,11 @@ public class ConcurrentHashMapV8
         }
 
         /**
-         * Return the TreeNode (or null if not found) for the given key
+         * Returns the TreeNode (or null if not found) for the given key
          * starting at given root.
          */
-        @SuppressWarnings("unchecked") // suppress Comparable cast warning
-        final TreeNode getTreeNode(int h, Object k, TreeNode p) {
+        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
+            (int h, Object k, TreeNode<V> p) {
             Class<?> c = k.getClass();
             while (p != null) {
                 int dir, ph;  Object pk; Class<?> pc;
@@ -759,16 +828,17 @@ public class ConcurrentHashMapV8
                     if (c != (pc = pk.getClass()) ||
                         !(k instanceof Comparable) ||
                         (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
-                        dir = (c == pc) ? 0 : c.getName().compareTo(pc.getName());
-                        TreeNode r = null, s = null, pl, pr;
-                        if (dir >= 0) {
-                            if ((pl = p.left) != null && h <= pl.hash)
-                                s = pl;
+                        if ((dir = (c == pc) ? 0 :
+                             c.getName().compareTo(pc.getName())) == 0) {
+                            TreeNode<V> r = null, pl, pr; // check both sides
+                            if ((pr = p.right) != null && h >= pr.hash &&
+                                (r = getTreeNode(h, k, pr)) != null)
+                                return r;
+                            else if ((pl = p.left) != null && h <= pl.hash)
+                                dir = -1;
+                            else // nothing there
+                                return null;
                         }
-                        else if ((pr = p.right) != null && h >= pr.hash)
-                            s = pr;
-                        if (s != null && (r = getTreeNode(h, k, s)) != null)
-                            return r;
                     }
                 }
                 else
@@ -783,10 +853,10 @@ public class ConcurrentHashMapV8
          * read-lock to call getTreeNode, but during failure to get
          * lock, searches along next links.
          */
-        final Object getValue(int h, Object k) {
-            Node r = null;
+        final V getValue(int h, Object k) {
+            Node<V> r = null;
             int c = getState(); // Must read lock state first
-            for (Node e = first; e != null; e = e.next) {
+            for (Node<V> e = first; e != null; e = e.next) {
                 if (c <= 0 && compareAndSetState(c, c - 1)) {
                     try {
                         r = getTreeNode(h, k, root);
@@ -795,7 +865,7 @@ public class ConcurrentHashMapV8
                     }
                     break;
                 }
-                else if ((e.hash & HASH_BITS) == h && k.equals(e.key)) {
+                else if (e.hash == h && k.equals(e.key)) {
                     r = e;
                     break;
                 }
@@ -806,13 +876,13 @@ public class ConcurrentHashMapV8
         }
 
         /**
-         * Find or add a node
+         * Finds or adds a node.
          * @return null if added
          */
-        @SuppressWarnings("unchecked") // suppress Comparable cast warning
-        final TreeNode putTreeNode(int h, Object k, Object v) {
+        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
+            (int h, Object k, V v) {
             Class<?> c = k.getClass();
-            TreeNode pp = root, p = null;
+            TreeNode<V> pp = root, p = null;
             int dir = 0;
             while (pp != null) { // find existing node or leaf to insert at
                 int ph;  Object pk; Class<?> pc;
@@ -823,11 +893,14 @@ public class ConcurrentHashMapV8
                     if (c != (pc = pk.getClass()) ||
                         !(k instanceof Comparable) ||
                         (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
-                        dir = (c == pc) ? 0 : c.getName().compareTo(pc.getName());
-                        TreeNode r = null, s = null, pl, pr;
-                        if (dir >= 0) {
-                            if ((pl = p.left) != null && h <= pl.hash)
-                                s = pl;
+                        TreeNode<V> s = null, r = null, pr;
+                        if ((dir = (c == pc) ? 0 :
+                             c.getName().compareTo(pc.getName())) == 0) {
+                            if ((pr = p.right) != null && h >= pr.hash &&
+                                (r = getTreeNode(h, k, pr)) != null)
+                                return r;
+                            else // continue left
+                                dir = -1;
                         }
                         else if ((pr = p.right) != null && h >= pr.hash)
                             s = pr;
@@ -840,12 +913,12 @@ public class ConcurrentHashMapV8
                 pp = (dir > 0) ? p.right : p.left;
             }
 
-            TreeNode f = first;
-            TreeNode x = first = new TreeNode(h, k, v, f, p);
+            TreeNode<V> f = first;
+            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
             if (p == null)
                 root = x;
             else { // attach and rebalance; adapted from CLR
-                TreeNode xp, xpp;
+                TreeNode<V> xp, xpp;
                 if (f != null)
                     f.prev = x;
                 if (dir <= 0)
@@ -855,9 +928,9 @@ public class ConcurrentHashMapV8
                 x.red = true;
                 while (x != null && (xp = x.parent) != null && xp.red &&
                        (xpp = xp.parent) != null) {
-                    TreeNode xppl = xpp.left;
+                    TreeNode<V> xppl = xpp.left;
                     if (xp == xppl) {
-                        TreeNode y = xpp.right;
+                        TreeNode<V> y = xpp.right;
                         if (y != null && y.red) {
                             y.red = false;
                             xp.red = false;
@@ -879,7 +952,7 @@ public class ConcurrentHashMapV8
                         }
                     }
                     else {
-                        TreeNode y = xppl;
+                        TreeNode<V> y = xppl;
                         if (y != null && y.red) {
                             y.red = false;
                             xp.red = false;
@@ -901,7 +974,7 @@ public class ConcurrentHashMapV8
                         }
                     }
                 }
-                TreeNode r = root;
+                TreeNode<V> r = root;
                 if (r != null && r.red)
                     r.red = false;
             }
@@ -916,31 +989,31 @@ public class ConcurrentHashMapV8
          * that are accessible independently of lock. So instead we
          * swap the tree linkages.
          */
-        final void deleteTreeNode(TreeNode p) {
-            TreeNode next = (TreeNode)p.next; // unlink traversal pointers
-            TreeNode pred = p.prev;
+        final void deleteTreeNode(TreeNode<V> p) {
+            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
+            TreeNode<V> pred = p.prev;
             if (pred == null)
                 first = next;
             else
                 pred.next = next;
             if (next != null)
                 next.prev = pred;
-            TreeNode replacement;
-            TreeNode pl = p.left;
-            TreeNode pr = p.right;
+            TreeNode<V> replacement;
+            TreeNode<V> pl = p.left;
+            TreeNode<V> pr = p.right;
             if (pl != null && pr != null) {
-                TreeNode s = pr, sl;
+                TreeNode<V> s = pr, sl;
                 while ((sl = s.left) != null) // find successor
                     s = sl;
                 boolean c = s.red; s.red = p.red; p.red = c; // swap colors
-                TreeNode sr = s.right;
-                TreeNode pp = p.parent;
+                TreeNode<V> sr = s.right;
+                TreeNode<V> pp = p.parent;
                 if (s == pr) { // p was s's direct parent
                     p.parent = s;
                     s.right = p;
                 }
                 else {
-                    TreeNode sp = s.parent;
+                    TreeNode<V> sp = s.parent;
                     if ((p.parent = sp) != null) {
                         if (s == sp.left)
                             sp.left = p;
@@ -965,7 +1038,7 @@ public class ConcurrentHashMapV8
             }
             else
                 replacement = (pl != null) ? pl : pr;
-            TreeNode pp = p.parent;
+            TreeNode<V> pp = p.parent;
             if (replacement == null) {
                 if (pp == null) {
                     root = null;
@@ -984,15 +1057,15 @@ public class ConcurrentHashMapV8
                 p.left = p.right = p.parent = null;
             }
             if (!p.red) { // rebalance, from CLR
-                TreeNode x = replacement;
+                TreeNode<V> x = replacement;
                 while (x != null) {
-                    TreeNode xp, xpl;
+                    TreeNode<V> xp, xpl;
                     if (x.red || (xp = x.parent) == null) {
                         x.red = false;
                         break;
                     }
                     if (x == (xpl = xp.left)) {
-                        TreeNode sib = xp.right;
+                        TreeNode<V> sib = xp.right;
                         if (sib != null && sib.red) {
                             sib.red = false;
                             xp.red = true;
@@ -1002,7 +1075,7 @@ public class ConcurrentHashMapV8
                         if (sib == null)
                             x = xp;
                         else {
-                            TreeNode sl = sib.left, sr = sib.right;
+                            TreeNode<V> sl = sib.left, sr = sib.right;
                             if ((sr == null || !sr.red) &&
                                 (sl == null || !sl.red)) {
                                 sib.red = true;
@@ -1014,7 +1087,8 @@ public class ConcurrentHashMapV8
                                         sl.red = false;
                                     sib.red = true;
                                     rotateRight(sib);
-                                    sib = (xp = x.parent) == null ? null : xp.right;
+                                    sib = (xp = x.parent) == null ?
+                                        null : xp.right;
                                 }
                                 if (sib != null) {
                                     sib.red = (xp == null) ? false : xp.red;
@@ -1030,7 +1104,7 @@ public class ConcurrentHashMapV8
                         }
                     }
                     else { // symmetric
-                        TreeNode sib = xpl;
+                        TreeNode<V> sib = xpl;
                         if (sib != null && sib.red) {
                             sib.red = false;
                             xp.red = true;
@@ -1040,7 +1114,7 @@ public class ConcurrentHashMapV8
                         if (sib == null)
                             x = xp;
                         else {
-                            TreeNode sl = sib.left, sr = sib.right;
+                            TreeNode<V> sl = sib.left, sr = sib.right;
                             if ((sl == null || !sl.red) &&
                                 (sr == null || !sr.red)) {
                                 sib.red = true;
@@ -1052,7 +1126,8 @@ public class ConcurrentHashMapV8
                                         sr.red = false;
                                     sib.red = true;
                                     rotateLeft(sib);
-                                    sib = (xp = x.parent) == null ? null : xp.left;
+                                    sib = (xp = x.parent) == null ?
+                                        null : xp.left;
                                 }
                                 if (sib != null) {
                                     sib.red = (xp == null) ? false : xp.red;
@@ -1082,7 +1157,7 @@ public class ConcurrentHashMapV8
     /* ---------------- Collision reduction methods -------------- */
 
     /**
-     * Spreads higher bits to lower, and also forces top 2 bits to 0.
+     * Spreads higher bits to lower, and also forces top bit to 0.
      * Because the table uses power-of-two masking, sets of hashes
      * that vary only in bits above the current mask will always
      * collide. (Among known examples are sets of Float keys holding
@@ -1100,37 +1175,35 @@ public class ConcurrentHashMapV8
     }
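
    // Sketch of the shape of spread (its body is outside this hunk):
    // mix the high bits downward, then clear the sign bit reserved
    // for MOVED nodes:
    //
    //   h ^= (h >>> 18) ^ (h >>> 12);
    //   return (h ^ (h >>> 10)) & HASH_BITS;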
 
     /**
-     * Replaces a list bin with a tree bin. Call only when locked.
-     * Fails to replace if the given key is non-comparable or table
-     * is, or needs, resizing.
-     */
-    private final void replaceWithTreeBin(Node[] tab, int index, Object key) {
-        if ((key instanceof Comparable) &&
-            (tab.length >= MAXIMUM_CAPACITY || counter.sum() < (long)sizeCtl)) {
-            TreeBin t = new TreeBin();
-            for (Node e = tabAt(tab, index); e != null; e = e.next)
-                t.putTreeNode(e.hash & HASH_BITS, e.key, e.val);
-            setTabAt(tab, index, new Node(MOVED, t, null, null));
+     * Replaces a list bin with a tree bin if key is comparable.  Call
+     * only when locked.
+     */
+    private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
+        if (key instanceof Comparable) {
+            TreeBin<V> t = new TreeBin<V>();
+            for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
+                t.putTreeNode(e.hash, e.key, e.val);
+            setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
         }
     }
 
     /* ---------------- Internal access and update methods -------------- */
 
     /** Implementation for get and containsKey */
-    private final Object internalGet(Object k) {
+    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
         int h = spread(k.hashCode());
-        retry: for (Node[] tab = table; tab != null;) {
-            Node e, p; Object ek, ev; int eh;      // locals to read fields once
+        retry: for (Node<V>[] tab = table; tab != null;) {
+            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
             for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
-                if ((eh = e.hash) == MOVED) {
+                if ((eh = e.hash) < 0) {
                     if ((ek = e.key) instanceof TreeBin)  // search TreeBin
-                        return ((TreeBin)ek).getValue(h, k);
-                    else {                        // restart with new table
-                        tab = (Node[])ek;
+                        return ((TreeBin<V>)ek).getValue(h, k);
+                    else {                      // restart with new table
+                        tab = (Node<V>[])ek;
                         continue retry;
                     }
                 }
-                else if ((eh & HASH_BITS) == h && (ev = e.val) != null &&
+                else if (eh == h && (ev = e.val) != null &&
                          ((ek = e.key) == k || k.equals(ek)))
                     return ev;
             }
@@ -1144,26 +1217,27 @@ public class ConcurrentHashMapV8
      * Replaces node value with v, conditional upon match of cv if
      * non-null.  If resulting value is null, delete.
      */
-    private final Object internalReplace(Object k, Object v, Object cv) {
+    @SuppressWarnings("unchecked") private final V internalReplace
+        (Object k, V v, Object cv) {
         int h = spread(k.hashCode());
-        Object oldVal = null;
-        for (Node[] tab = table;;) {
-            Node f; int i, fh; Object fk;
+        V oldVal = null;
+        for (Node<V>[] tab = table;;) {
+            Node<V> f; int i, fh; Object fk;
             if (tab == null ||
                 (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
                 break;
-            else if ((fh = f.hash) == MOVED) {
+            else if ((fh = f.hash) < 0) {
                 if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
+                    TreeBin<V> t = (TreeBin<V>)fk;
                     boolean validated = false;
                     boolean deleted = false;
                     t.acquire(0);
                     try {
                         if (tabAt(tab, i) == f) {
                             validated = true;
-                            TreeNode p = t.getTreeNode(h, k, t.root);
+                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                             if (p != null) {
-                                Object pv = p.val;
+                                V pv = p.val;
                                 if (cv == null || cv == pv || cv.equals(pv)) {
                                     oldVal = pv;
                                     if ((p.val = v) == null) {
@@ -1178,35 +1252,31 @@ public class ConcurrentHashMapV8
                     }
                     if (validated) {
                         if (deleted)
-                            counter.add(-1L);
+                            addCount(-1L, -1);
                         break;
                     }
                 }
                 else
-                    tab = (Node[])fk;
+                    tab = (Node<V>[])fk;
             }
-            else if ((fh & HASH_BITS) != h && f.next == null) // precheck
+            else if (fh != h && f.next == null) // precheck
                 break;                          // rules out possible existence
-            else if ((fh & LOCKED) != 0) {
-                checkForResize();               // try resizing if can't get lock
-                f.tryAwaitLock(tab, i);
-            }
-            else if (f.casHash(fh, fh | LOCKED)) {
+            else {
                 boolean validated = false;
                 boolean deleted = false;
-                try {
+                synchronized (f) {
                     if (tabAt(tab, i) == f) {
                         validated = true;
-                        for (Node e = f, pred = null;;) {
-                            Object ek, ev;
-                            if ((e.hash & HASH_BITS) == h &&
+                        for (Node<V> e = f, pred = null;;) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
                                 ((ev = e.val) != null) &&
                                 ((ek = e.key) == k || k.equals(ek))) {
                                 if (cv == null || cv == ev || cv.equals(ev)) {
                                     oldVal = ev;
                                     if ((e.val = v) == null) {
                                         deleted = true;
-                                        Node en = e.next;
+                                        Node<V> en = e.next;
                                         if (pred != null)
                                             pred.next = en;
                                         else
@@ -1220,15 +1290,10 @@ public class ConcurrentHashMapV8
                                 break;
                         }
                     }
-                } finally {
-                    if (!f.casHash(fh | LOCKED, fh)) {
-                        f.hash = fh;
-                        synchronized (f) { f.notifyAll(); };
-                    }
                 }
                 if (validated) {
                     if (deleted)
-                        counter.add(-1L);
+                        addCount(-1L, -1);
                     break;
                 }
             }
@@ -1237,393 +1302,353 @@ public class ConcurrentHashMapV8
     }
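
For reference, the externally visible behavior the cv argument supports, via the public conditional methods (standard ConcurrentMap semantics; remove(key, value) is the "resulting value is null" case):

    ConcurrentHashMapV8<String, Integer> m = new ConcurrentHashMapV8<String, Integer>();
    m.put("k", 1);
    m.replace("k", 1, 2);   // true: current value matched 1, now mapped to 2
    m.replace("k", 9, 3);   // false: current value is 2, not 9; unchanged
    m.remove("k", 2);       // true: conditional delete, entry removed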
 
     /*
-     * Internal versions of the five insertion methods, each a
-     * little more complicated than the last. All have
-     * the same basic structure as the first (internalPut):
+     * Internal versions of insertion methods.
+     * All have the same basic structure as the first (internalPut):
      *  1. If table uninitialized, create
      *  2. If bin empty, try to CAS new node
      *  3. If bin stale, use new table
      *  4. if bin converted to TreeBin, validate and relay to TreeBin methods
      *  5. Lock and validate; if valid, scan and add or update
      *
-     * The others interweave other checks and/or alternative actions:
-     *  * Plain put checks for and performs resize after insertion.
-     *  * putIfAbsent prescans for mapping without lock (and fails to add
-     *    if present), which also makes pre-emptive resize checks worthwhile.
-     *  * computeIfAbsent extends form used in putIfAbsent with additional
-     *    mechanics to deal with, calls, potential exceptions and null
-     *    returns from function call.
-     *  * compute uses the same function-call mechanics, but without
-     *    the prescans
-     *  * putAll attempts to pre-allocate enough table space
-     *    and more lazily performs count updates and checks.
-     *
-     * Someday when details settle down a bit more, it might be worth
-     * some factoring to reduce sprawl.
+     * The putAll method differs mainly in attempting to pre-allocate
+     * enough table space, and also more lazily performs count updates
+     * and checks.
+     *
+     * Most of the function-accepting methods can't be factored nicely
+     * because they require different functional forms, so they instead
+     * sprawl out similar mechanics.
      */
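
To make steps 2 and 5 concrete, a minimal, hypothetical sketch of the same bin discipline in isolation: CAS a new node into an empty bin, otherwise lock the head node and revalidate before scanning. The names (SimpleBinMap, SimpleNode) are illustrative only, and initialization, resizing, and TreeBin conversion are omitted.

    import java.util.concurrent.atomic.AtomicReferenceArray;

    class SimpleBinMap<K, V> {
        static final class SimpleNode<K, V> {
            final int hash; final K key;
            volatile V val; volatile SimpleNode<K, V> next;
            SimpleNode(int h, K k, V v, SimpleNode<K, V> n) {
                hash = h; key = k; val = v; next = n;
            }
        }
        final AtomicReferenceArray<SimpleNode<K, V>> tab =
            new AtomicReferenceArray<SimpleNode<K, V>>(16);   // fixed size

        V put(K k, V v) {
            int h = k.hashCode() & 0x7fffffff;
            for (;;) {
                int i = (tab.length() - 1) & h;
                SimpleNode<K, V> f = tab.get(i);
                if (f == null) {                  // step 2: empty bin, try CAS
                    if (tab.compareAndSet(i, null,
                                          new SimpleNode<K, V>(h, k, v, null)))
                        return null;              // added without locking
                }
                else {
                    synchronized (f) {            // step 5: lock head node
                        if (tab.get(i) == f) {    // validate: still the head?
                            for (SimpleNode<K, V> e = f;;) {
                                if (e.hash == h && k.equals(e.key)) {
                                    V old = e.val;
                                    e.val = v;    // update existing mapping
                                    return old;
                                }
                                SimpleNode<K, V> last = e;
                                if ((e = e.next) == null) {
                                    last.next = new SimpleNode<K, V>(h, k, v, null);
                                    return null;  // appended new node
                                }
                            }
                        }                         // head changed; retry
                    }
                }
            }
        }
    }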
 
-    /** Implementation for put */
-    private final Object internalPut(Object k, Object v) {
+    /** Implementation for put and putIfAbsent */
+    @SuppressWarnings("unchecked") private final V internalPut
+        (K k, V v, boolean onlyIfAbsent) {
+        if (k == null || v == null) throw new NullPointerException();
         int h = spread(k.hashCode());
-        int count = 0;
-        for (Node[] tab = table;;) {
-            int i; Node f; int fh; Object fk;
+        int len = 0;
+        for (Node<V>[] tab = table;;) {
+            int i, fh; Node<V> f; Object fk; V fv;
             if (tab == null)
                 tab = initTable();
             else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
-                if (casTabAt(tab, i, null, new Node(h, k, v, null)))
+                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
                     break;                   // no lock when adding to empty bin
             }
-            else if ((fh = f.hash) == MOVED) {
+            else if ((fh = f.hash) < 0) {
                 if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
-                    Object oldVal = null;
+                    TreeBin<V> t = (TreeBin<V>)fk;
+                    V oldVal = null;
                     t.acquire(0);
                     try {
                         if (tabAt(tab, i) == f) {
-                            count = 2;
-                            TreeNode p = t.putTreeNode(h, k, v);
+                            len = 2;
+                            TreeNode<V> p = t.putTreeNode(h, k, v);
                             if (p != null) {
                                 oldVal = p.val;
-                                p.val = v;
+                                if (!onlyIfAbsent)
+                                    p.val = v;
                             }
                         }
                     } finally {
                         t.release(0);
                     }
-                    if (count != 0) {
+                    if (len != 0) {
                         if (oldVal != null)
                             return oldVal;
                         break;
                     }
                 }
                 else
-                    tab = (Node[])fk;
+                    tab = (Node<V>[])fk;
             }
-            else if ((fh & LOCKED) != 0) {
-                checkForResize();
-                f.tryAwaitLock(tab, i);
-            }
-            else if (f.casHash(fh, fh | LOCKED)) {
-                Object oldVal = null;
-                try {                        // needed in case equals() throws
+            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
+                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
+                return fv;
+            else {
+                V oldVal = null;
+                synchronized (f) {
                     if (tabAt(tab, i) == f) {
-                        count = 1;
-                        for (Node e = f;; ++count) {
-                            Object ek, ev;
-                            if ((e.hash & HASH_BITS) == h &&
+                        len = 1;
+                        for (Node<V> e = f;; ++len) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
                                 (ev = e.val) != null &&
                                 ((ek = e.key) == k || k.equals(ek))) {
                                 oldVal = ev;
-                                e.val = v;
+                                if (!onlyIfAbsent)
+                                    e.val = v;
                                 break;
                             }
-                            Node last = e;
+                            Node<V> last = e;
                             if ((e = e.next) == null) {
-                                last.next = new Node(h, k, v, null);
-                                if (count >= TREE_THRESHOLD)
+                                last.next = new Node<V>(h, k, v, null);
+                                if (len >= TREE_THRESHOLD)
                                     replaceWithTreeBin(tab, i, k);
                                 break;
                             }
                         }
                     }
-                } finally {                  // unlock and signal if needed
-                    if (!f.casHash(fh | LOCKED, fh)) {
-                        f.hash = fh;
-                        synchronized (f) { f.notifyAll(); };
-                    }
                 }
-                if (count != 0) {
+                if (len != 0) {
                     if (oldVal != null)
                         return oldVal;
-                    if (tab.length <= 64)
-                        count = 2;
                     break;
                 }
             }
         }
-        counter.add(1L);
-        if (count > 1)
-            checkForResize();
+        addCount(1L, len);
         return null;
     }
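
The onlyIfAbsent flag is the only difference between the two public entry points this method backs; a brief usage sketch:

    ConcurrentHashMapV8<String, Integer> map = new ConcurrentHashMapV8<String, Integer>();
    map.put("a", 1);           // returns null: no previous mapping
    map.putIfAbsent("a", 2);   // returns 1: existing value kept (onlyIfAbsent)
    map.put("a", 3);           // returns 1: value overwritten, now 3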
 
-    /** Implementation for putIfAbsent */
-    private final Object internalPutIfAbsent(Object k, Object v) {
+    /** Implementation for computeIfAbsent */
+    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
+        (K k, Fun<? super K, ? extends V> mf) {
+        if (k == null || mf == null)
+            throw new NullPointerException();
         int h = spread(k.hashCode());
-        int count = 0;
-        for (Node[] tab = table;;) {
-            int i; Node f; int fh; Object fk, fv;
+        V val = null;
+        int len = 0;
+        for (Node<V>[] tab = table;;) {
+            Node<V> f; int i; Object fk;
             if (tab == null)
                 tab = initTable();
             else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
-                if (casTabAt(tab, i, null, new Node(h, k, v, null)))
+                Node<V> node = new Node<V>(h, k, null, null);
+                synchronized (node) {
+                    if (casTabAt(tab, i, null, node)) {
+                        len = 1;
+                        try {
+                            if ((val = mf.apply(k)) != null)
+                                node.val = val;
+                        } finally {
+                            if (val == null)
+                                setTabAt(tab, i, null);
+                        }
+                    }
+                }
+                if (len != 0)
                     break;
             }
-            else if ((fh = f.hash) == MOVED) {
+            else if (f.hash < 0) {
                 if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
-                    Object oldVal = null;
+                    TreeBin<V> t = (TreeBin<V>)fk;
+                    boolean added = false;
                     t.acquire(0);
                     try {
                         if (tabAt(tab, i) == f) {
-                            count = 2;
-                            TreeNode p = t.putTreeNode(h, k, v);
+                            len = 1;
+                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                             if (p != null)
-                                oldVal = p.val;
+                                val = p.val;
+                            else if ((val = mf.apply(k)) != null) {
+                                added = true;
+                                len = 2;
+                                t.putTreeNode(h, k, val);
+                            }
                         }
                     } finally {
                         t.release(0);
                     }
-                    if (count != 0) {
-                        if (oldVal != null)
-                            return oldVal;
+                    if (len != 0) {
+                        if (!added)
+                            return val;
                         break;
                     }
                 }
                 else
-                    tab = (Node[])fk;
+                    tab = (Node<V>[])fk;
             }
-            else if ((fh & HASH_BITS) == h && (fv = f.val) != null &&
-                     ((fk = f.key) == k || k.equals(fk)))
-                return fv;
             else {
-                Node g = f.next;
-                if (g != null) { // at least 2 nodes -- search and maybe resize
-                    for (Node e = g;;) {
-                        Object ek, ev;
-                        if ((e.hash & HASH_BITS) == h && (ev = e.val) != null &&
-                            ((ek = e.key) == k || k.equals(ek)))
-                            return ev;
-                        if ((e = e.next) == null) {
-                            checkForResize();
-                            break;
-                        }
-                    }
+                for (Node<V> e = f; e != null; e = e.next) { // prescan
+                    Object ek; V ev;
+                    if (e.hash == h && (ev = e.val) != null &&
+                        ((ek = e.key) == k || k.equals(ek)))
+                        return ev;
                 }
-                if (((fh = f.hash) & LOCKED) != 0) {
-                    checkForResize();
-                    f.tryAwaitLock(tab, i);
-                }
-                else if (tabAt(tab, i) == f && f.casHash(fh, fh | LOCKED)) {
-                    Object oldVal = null;
-                    try {
-                        if (tabAt(tab, i) == f) {
-                            count = 1;
-                            for (Node e = f;; ++count) {
-                                Object ek, ev;
-                                if ((e.hash & HASH_BITS) == h &&
-                                    (ev = e.val) != null &&
-                                    ((ek = e.key) == k || k.equals(ek))) {
-                                    oldVal = ev;
-                                    break;
-                                }
-                                Node last = e;
-                                if ((e = e.next) == null) {
-                                    last.next = new Node(h, k, v, null);
-                                    if (count >= TREE_THRESHOLD)
+                boolean added = false;
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<V> e = f;; ++len) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
+                                (ev = e.val) != null &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                val = ev;
+                                break;
+                            }
+                            Node<V> last = e;
+                            if ((e = e.next) == null) {
+                                if ((val = mf.apply(k)) != null) {
+                                    added = true;
+                                    last.next = new Node<V>(h, k, val, null);
+                                    if (len >= TREE_THRESHOLD)
                                         replaceWithTreeBin(tab, i, k);
-                                    break;
                                 }
+                                break;
                             }
                         }
-                    } finally {
-                        if (!f.casHash(fh | LOCKED, fh)) {
-                            f.hash = fh;
-                            synchronized (f) { f.notifyAll(); };
-                        }
-                    }
-                    if (count != 0) {
-                        if (oldVal != null)
-                            return oldVal;
-                        if (tab.length <= 64)
-                            count = 2;
-                        break;
                     }
                 }
+                if (len != 0) {
+                    if (!added)
+                        return val;
+                    break;
+                }
             }
         }
-        counter.add(1L);
-        if (count > 1)
-            checkForResize();
-        return null;
+        if (val != null)
+            addCount(1L, len);
+        return val;
     }
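
Seen from the public computeIfAbsent method (Fun and its apply method are this class's own function interface, as invoked above), the mapping function runs at most once per absent key while the bin is held, so it should be brief and must not itself update the map:

    ConcurrentHashMapV8<String, Integer> lengths = new ConcurrentHashMapV8<String, Integer>();
    Integer n = lengths.computeIfAbsent("hello",
        new ConcurrentHashMapV8.Fun<String, Integer>() {
            public Integer apply(String s) { return s.length(); }
        });
    // n == 5; later calls for "hello" return the cached 5 without re-applying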
 
-    /** Implementation for computeIfAbsent */
-    private final Object internalComputeIfAbsent(K k,
-                                                 MappingFunction<? super K, ? extends V> mf) {
+    /** Implementation for compute */
+    @SuppressWarnings("unchecked") private final V internalCompute
+        (K k, boolean onlyIfPresent,
+         BiFun<? super K, ? super V, ? extends V> mf) {
+        if (k == null || mf == null)
+            throw new NullPointerException();
         int h = spread(k.hashCode());
-        Object val = null;
-        int count = 0;
-        for (Node[] tab = table;;) {
-            Node f; int i, fh; Object fk, fv;
+        V val = null;
+        int delta = 0;
+        int len = 0;
+        for (Node<V>[] tab = table;;) {
+            Node<V> f; int i, fh; Object fk;
             if (tab == null)
                 tab = initTable();
             else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
-                Node node = new Node(fh = h | LOCKED, k, null, null);
-                if (casTabAt(tab, i, null, node)) {
-                    count = 1;
-                    try {
-                        if ((val = mf.map(k)) != null)
-                            node.val = val;
-                    } finally {
-                        if (val == null)
-                            setTabAt(tab, i, null);
-                        if (!node.casHash(fh, h)) {
-                            node.hash = h;
-                            synchronized (node) { node.notifyAll(); };
+                if (onlyIfPresent)
+                    break;
+                Node<V> node = new Node<V>(h, k, null, null);
+                synchronized (node) {
+                    if (casTabAt(tab, i, null, node)) {
+                        try {
+                            len = 1;
+                            if ((val = mf.apply(k, null)) != null) {
+                                node.val = val;
+                                delta = 1;
+                            }
+                        } finally {
+                            if (delta == 0)
+                                setTabAt(tab, i, null);
                         }
                     }
                 }
-                if (count != 0)
+                if (len != 0)
                     break;
             }
-            else if ((fh = f.hash) == MOVED) {
+            else if ((fh = f.hash) < 0) {
                 if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
-                    boolean added = false;
+                    TreeBin<V> t = (TreeBin<V>)fk;
                     t.acquire(0);
                     try {
                         if (tabAt(tab, i) == f) {
-                            count = 1;
-                            TreeNode p = t.getTreeNode(h, k, t.root);
-                            if (p != null)
-                                val = p.val;
-                            else if ((val = mf.map(k)) != null) {
-                                added = true;
-                                count = 2;
-                                t.putTreeNode(h, k, val);
+                            len = 1;
+                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
+                            if (p == null && onlyIfPresent)
+                                break;
+                            V pv = (p == null) ? null : p.val;
+                            if ((val = mf.apply(k, pv)) != null) {
+                                if (p != null)
+                                    p.val = val;
+                                else {
+                                    len = 2;
+                                    delta = 1;
+                                    t.putTreeNode(h, k, val);
+                                }
+                            }
+                            else if (p != null) {
+                                delta = -1;
+                                t.deleteTreeNode(p);
                             }
                         }
                     } finally {
                         t.release(0);
                     }
-                    if (count != 0) {
-                        if (!added)
-                            return val;
+                    if (len != 0)
                         break;
-                    }
                 }
                 else
-                    tab = (Node[])fk;
+                    tab = (Node<V>[])fk;
             }
-            else if ((fh & HASH_BITS) == h && (fv = f.val) != null &&
-                     ((fk = f.key) == k || k.equals(fk)))
-                return fv;
             else {
-                Node g = f.next;
-                if (g != null) {
-                    for (Node e = g;;) {
-                        Object ek, ev;
-                        if ((e.hash & HASH_BITS) == h && (ev = e.val) != null &&
-                            ((ek = e.key) == k || k.equals(ek)))
-                            return ev;
-                        if ((e = e.next) == null) {
-                            checkForResize();
-                            break;
-                        }
-                    }
-                }
-                if (((fh = f.hash) & LOCKED) != 0) {
-                    checkForResize();
-                    f.tryAwaitLock(tab, i);
-                }
-                else if (tabAt(tab, i) == f && f.casHash(fh, fh | LOCKED)) {
-                    boolean added = false;
-                    try {
-                        if (tabAt(tab, i) == f) {
-                            count = 1;
-                            for (Node e = f;; ++count) {
-                                Object ek, ev;
-                                if ((e.hash & HASH_BITS) == h &&
-                                    (ev = e.val) != null &&
-                                    ((ek = e.key) == k || k.equals(ek))) {
-                                    val = ev;
-                                    break;
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        len = 1;
+                        for (Node<V> e = f, pred = null;; ++len) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
+                                (ev = e.val) != null &&
+                                ((ek = e.key) == k || k.equals(ek))) {
+                                val = mf.apply(k, ev);
+                                if (val != null)
+                                    e.val = val;
+                                else {
+                                    delta = -1;
+                                    Node<V> en = e.next;
+                                    if (pred != null)
+                                        pred.next = en;
+                                    else
+                                        setTabAt(tab, i, en);
                                 }
-                                Node last = e;
-                                if ((e = e.next) == null) {
-                                    if ((val = mf.map(k)) != null) {
-                                        added = true;
-                                        last.next = new Node(h, k, val, null);
-                                        if (count >= TREE_THRESHOLD)
-                                            replaceWithTreeBin(tab, i, k);
-                                    }
-                                    break;
+                                break;
+                            }
+                            pred = e;
+                            if ((e = e.next) == null) {
+                                if (!onlyIfPresent &&
+                                    (val = mf.apply(k, null)) != null) {
+                                    pred.next = new Node<V>(h, k, val, null);
+                                    delta = 1;
+                                    if (len >= TREE_THRESHOLD)
+                                        replaceWithTreeBin(tab, i, k);
                                 }
+                                break;
                             }
                         }
-                    } finally {
-                        if (!f.casHash(fh | LOCKED, fh)) {
-                            f.hash = fh;
-                            synchronized (f) { f.notifyAll(); };
-                        }
-                    }
-                    if (count != 0) {
-                        if (!added)
-                            return val;
-                        if (tab.length <= 64)
-                            count = 2;
-                        break;
                     }
                 }
+                if (len != 0)
+                    break;
             }
         }
-        if (val != null) {
-            counter.add(1L);
-            if (count > 1)
-                checkForResize();
-        }
+        if (delta != 0)
+            addCount((long)delta, len);
         return val;
     }
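
As driven from the public compute method, the BiFun receives the key and the current value (null if absent), and a null result removes the entry, which is the delta = -1 path above:

    ConcurrentHashMapV8<String, Integer> counts = new ConcurrentHashMapV8<String, Integer>();
    counts.compute("x", new ConcurrentHashMapV8.BiFun<String, Integer, Integer>() {
        public Integer apply(String k, Integer v) {
            return (v == null) ? 1 : v + 1;   // insert 1 or increment; null would delete
        }
    });   // "x" -> 1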
 
-    /** Implementation for compute */
-    @SuppressWarnings("unchecked")
-    private final Object internalCompute(K k,
-                                         RemappingFunction<? super K, V> mf) {
+    /** Implementation for merge */
+    @SuppressWarnings("unchecked") private final V internalMerge
+        (K k, V v, BiFun<? super V, ? super V, ? extends V> mf) {
+        if (k == null || v == null || mf == null)
+            throw new NullPointerException();
         int h = spread(k.hashCode());
-        Object val = null;
+        V val = null;
         int delta = 0;
-        int count = 0;
-        for (Node[] tab = table;;) {
-            Node f; int i, fh; Object fk;
+        int len = 0;
+        for (Node<V>[] tab = table;;) {
+            int i; Node<V> f; Object fk; V fv;
             if (tab == null)
                 tab = initTable();
             else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
-                Node node = new Node(fh = h | LOCKED, k, null, null);
-                if (casTabAt(tab, i, null, node)) {
-                    try {
-                        count = 1;
-                        if ((val = mf.remap(k, null)) != null) {
-                            node.val = val;
-                            delta = 1;
-                        }
-                    } finally {
-                        if (delta == 0)
-                            setTabAt(tab, i, null);
-                        if (!node.casHash(fh, h)) {
-                            node.hash = h;
-                            synchronized (node) { node.notifyAll(); };
-                        }
-                    }
-                }
-                if (count != 0)
+                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
+                    delta = 1;
+                    val = v;
                     break;
+                }
             }
-            else if ((fh = f.hash) == MOVED) {
+            else if (f.hash < 0) {
                 if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
+                    TreeBin<V> t = (TreeBin<V>)fk;
                     t.acquire(0);
                     try {
                         if (tabAt(tab, i) == f) {
-                            count = 1;
-                            TreeNode p = t.getTreeNode(h, k, t.root);
-                            Object pv = (p == null) ? null : p.val;
-                            if ((val = mf.remap(k, (V)pv)) != null) {
+                            len = 1;
+                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
+                            val = (p == null) ? v : mf.apply(p.val, v);
+                            if (val != null) {
                                 if (p != null)
                                     p.val = val;
                                 else {
-                                    count = 2;
+                                    len = 2;
                                     delta = 1;
                                     t.putTreeNode(h, k, val);
                                 }
@@ -1636,31 +1661,27 @@ public class ConcurrentHashMapV8
                     } finally {
                         t.release(0);
                     }
-                    if (count != 0)
+                    if (len != 0)
                         break;
                 }
                 else
-                    tab = (Node[])fk;
+                    tab = (Node<V>[])fk;
             }
-            else if ((fh & LOCKED) != 0) {
-                checkForResize();
-                f.tryAwaitLock(tab, i);
-            }
-            else if (f.casHash(fh, fh | LOCKED)) {
-                try {
+            else {
+                synchronized (f) {
                     if (tabAt(tab, i) == f) {
-                        count = 1;
-                        for (Node e = f, pred = null;; ++count) {
-                            Object ek, ev;
-                            if ((e.hash & HASH_BITS) == h &&
+                        len = 1;
+                        for (Node<V> e = f, pred = null;; ++len) {
+                            Object ek; V ev;
+                            if (e.hash == h &&
                                 (ev = e.val) != null &&
                                 ((ek = e.key) == k || k.equals(ek))) {
-                                val = mf.remap(k, (V)ev);
+                                val = mf.apply(ev, v);
                                 if (val != null)
                                     e.val = val;
                                 else {
                                     delta = -1;
-                                    Node en = e.next;
+                                    Node<V> en = e.next;
                                     if (pred != null)
                                         pred.next = en;
                                     else
@@ -1670,70 +1691,59 @@ public class ConcurrentHashMapV8
                             }
                             pred = e;
                             if ((e = e.next) == null) {
-                                if ((val = mf.remap(k, null)) != null) {
-                                    pred.next = new Node(h, k, val, null);
-                                    delta = 1;
-                                    if (count >= TREE_THRESHOLD)
-                                        replaceWithTreeBin(tab, i, k);
-                                }
+                                val = v;
+                                pred.next = new Node<V>(h, k, val, null);
+                                delta = 1;
+                                if (len >= TREE_THRESHOLD)
+                                    replaceWithTreeBin(tab, i, k);
                                 break;
                             }
                         }
                     }
-                } finally {
-                    if (!f.casHash(fh | LOCKED, fh)) {
-                        f.hash = fh;
-                        synchronized (f) { f.notifyAll(); };
-                    }
                 }
-                if (count != 0) {
-                    if (tab.length <= 64)
-                        count = 2;
+                if (len != 0)
                     break;
-                }
             }
         }
-        if (delta != 0) {
-            counter.add((long)delta);
-            if (count > 1)
-                checkForResize();
-        }
+        if (delta != 0)
+            addCount((long)delta, len);
         return val;
     }
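
Note the argument order of the apply calls above: the existing value first, then the caller's v; the function is not consulted at all when the key is absent. From the public merge method:

    ConcurrentHashMapV8<String, Integer> tally = new ConcurrentHashMapV8<String, Integer>();
    ConcurrentHashMapV8.BiFun<Integer, Integer, Integer> sum =
        new ConcurrentHashMapV8.BiFun<Integer, Integer, Integer>() {
            public Integer apply(Integer old, Integer given) { return old + given; }
        };
    tally.merge("hits", 1, sum);   // absent: stores 1 without calling sum
    tally.merge("hits", 1, sum);   // present: stores sum.apply(1, 1) == 2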
 
     /** Implementation for putAll */
-    private final void internalPutAll(Map<?, ?> m) {
+    @SuppressWarnings("unchecked") private final void internalPutAll
+        (Map<? extends K, ? extends V> m) {
         tryPresize(m.size());
         long delta = 0L;     // number of uncommitted additions
         boolean npe = false; // to throw exception on exit for nulls
         try {                // to clean up counts on other exceptions
-            for (Map.Entry<?, ?> entry : m.entrySet()) {
-                Object k, v;
+            for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
+                Object k; V v;
                 if (entry == null || (k = entry.getKey()) == null ||
                     (v = entry.getValue()) == null) {
                     npe = true;
                     break;
                 }
                 int h = spread(k.hashCode());
-                for (Node[] tab = table;;) {
-                    int i; Node f; int fh; Object fk;
+                for (Node<V>[] tab = table;;) {
+                    int i; Node<V> f; int fh; Object fk;
                     if (tab == null)
                         tab = initTable();
                     else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
-                        if (casTabAt(tab, i, null, new Node(h, k, v, null))) {
+                        if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                             ++delta;
                             break;
                         }
                     }
-                    else if ((fh = f.hash) == MOVED) {
+                    else if ((fh = f.hash) < 0) {
                         if ((fk = f.key) instanceof TreeBin) {
-                            TreeBin t = (TreeBin)fk;
+                            TreeBin<V> t = (TreeBin<V>)fk;
                             boolean validated = false;
                             t.acquire(0);
                             try {
                                 if (tabAt(tab, i) == f) {
                                     validated = true;
-                                    TreeNode p = t.getTreeNode(h, k, t.root);
+                                    TreeNode<V> p = t.getTreeNode(h, k, t.root);
                                     if (p != null)
                                         p.val = v;
                                     else {
@@ -1748,48 +1758,36 @@ public class ConcurrentHashMapV8
                                 break;
                         }
                         else
-                            tab = (Node[])fk;
+                            tab = (Node<V>[])fk;
                     }
-                    else if ((fh & LOCKED) != 0) {
-                        counter.add(delta);
-                        delta = 0L;
-                        checkForResize();
-                        f.tryAwaitLock(tab, i);
-                    }
-                    else if (f.casHash(fh, fh | LOCKED)) {
-                        int count = 0;
-                        try {
+                    else {
+                        int len = 0;
+                        synchronized (f) {
                             if (tabAt(tab, i) == f) {
-                                count = 1;
-                                for (Node e = f;; ++count) {
-                                    Object ek, ev;
-                                    if ((e.hash & HASH_BITS) == h &&
+                                len = 1;
+                                for (Node<V> e = f;; ++len) {
+                                    Object ek; V ev;
+                                    if (e.hash == h &&
                                         (ev = e.val) != null &&
                                         ((ek = e.key) == k || k.equals(ek))) {
                                         e.val = v;
                                         break;
                                     }
-                                    Node last = e;
+                                    Node<V> last = e;
                                     if ((e = e.next) == null) {
                                         ++delta;
-                                        last.next = new Node(h, k, v, null);
-                                        if (count >= TREE_THRESHOLD)
+                                        last.next = new Node<V>(h, k, v, null);
+                                        if (len >= TREE_THRESHOLD)
                                             replaceWithTreeBin(tab, i, k);
                                         break;
                                     }
                                 }
                             }
-                        } finally {
-                            if (!f.casHash(fh | LOCKED, fh)) {
-                                f.hash = fh;
-                                synchronized (f) { f.notifyAll(); };
-                            }
                         }
-                        if (count != 0) {
-                            if (count > 1) {
-                                counter.add(delta);
+                        if (len != 0) {
+                            if (len > 1) {
+                                addCount(delta, len);
                                 delta = 0L;
-                                checkForResize();
                             }
                             break;
                         }
@@ -1797,13 +1795,68 @@ public class ConcurrentHashMapV8
                 }
             }
         } finally {
-            if (delta != 0)
-                counter.add(delta);
+            if (delta != 0L)
+                addCount(delta, 2);
         }
         if (npe)
             throw new NullPointerException();
     }
 
+    /**
+     * Implementation for clear. Steps through each bin, removing all
+     * nodes.
+     */
+    @SuppressWarnings("unchecked") private final void internalClear() {
+        long delta = 0L; // negative number of deletions
+        int i = 0;
+        Node<V>[] tab = table;
+        while (tab != null && i < tab.length) {
+            Node<V> f = tabAt(tab, i);
+            if (f == null)
+                ++i;
+            else if (f.hash < 0) {
+                Object fk;
+                if ((fk = f.key) instanceof TreeBin) {
+                    TreeBin<V> t = (TreeBin<V>)fk;
+                    t.acquire(0);
+                    try {
+                        if (tabAt(tab, i) == f) {
+                            for (Node<V> p = t.first; p != null; p = p.next) {
+                                if (p.val != null) { // (currently always true)
+                                    p.val = null;
+                                    --delta;
+                                }
+                            }
+                            t.first = null;
+                            t.root = null;
+                            ++i;
+                        }
+                    } finally {
+                        t.release(0);
+                    }
+                }
+                else
+                    tab = (Node<V>[])fk;
+            }
+            else {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        for (Node<V> e = f; e != null; e = e.next) {
+                            if (e.val != null) {  // (currently always true)
+                                e.val = null;
+                                --delta;
+                            }
+                        }
+                        setTabAt(tab, i, null);
+                        ++i;
+                    }
+                }
+            }
+        }
+        if (delta != 0L)
+            addCount(delta, -1);
+    }
+
     /* ---------------- Table Initialization and Resizing -------------- */
 
     /**
@@ -1823,16 +1876,17 @@ public class ConcurrentHashMapV8
     /**
      * Initializes table, using the size recorded in sizeCtl.
      */
-    private final Node[] initTable() {
-        Node[] tab; int sc;
+    @SuppressWarnings("unchecked") private final Node[] initTable() {
+        Node[] tab; int sc;
         while ((tab = table) == null) {
             if ((sc = sizeCtl) < 0)
                 Thread.yield(); // lost initialization race; just spin
-            else if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
+            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                 try {
                     if ((tab = table) == null) {
                         int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
-                        tab = table = new Node[n];
+                        @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
+                        table = tab = (Node<V>[])tb;
                         sc = n - (n >>> 2);
                     }
                 } finally {
@@ -1845,24 +1899,47 @@ public class ConcurrentHashMapV8
     }
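
The same initialization handshake, reduced to a hypothetical standalone form (LazyTable, ctl, and the fixed sizes are illustrative only): the CAS winner builds and publishes the table while losers yield until it appears.

    import java.util.concurrent.atomic.AtomicInteger;

    class LazyTable {
        volatile Object[] table;
        final AtomicInteger ctl = new AtomicInteger(0);   // -1 while initializing

        Object[] initTable() {
            Object[] tab;
            while ((tab = table) == null) {
                if (ctl.get() < 0)
                    Thread.yield();                 // lost the race; just spin
                else if (ctl.compareAndSet(0, -1)) {
                    try {
                        if ((tab = table) == null)  // recheck after winning
                            table = tab = new Object[16];
                    } finally {
                        ctl.set(16 - (16 >>> 2));   // 12: next resize threshold
                    }
                }
            }
            return tab;
        }
    }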
 
     /**
-     * If table is too small and not already resizing, creates next
-     * table and transfers bins.  Rechecks occupancy after a transfer
-     * to see if another resize is already needed because resizings
-     * are lagging additions.
-     */
-    private final void checkForResize() {
-        Node[] tab; int n, sc;
-        while ((tab = table) != null &&
-               (n = tab.length) < MAXIMUM_CAPACITY &&
-               (sc = sizeCtl) >= 0 && counter.sum() >= (long)sc &&
-               UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
-            try {
-                if (tab == table) {
-                    table = rebuild(tab);
-                    sc = (n << 1) - (n >>> 1);
+     * Adds to count, and if table is too small and not already
+     * resizing, initiates transfer. If already resizing, helps
+     * perform transfer if work is available.  Rechecks occupancy
+     * after a transfer to see if another resize is already needed
+     * because resizings are lagging additions.
+     *
+     * @param x the count to add
+     * @param check if <0, don't check resize, if <= 1 only check if uncontended
+     */
+    private final void addCount(long x, int check) {
+        CounterCell[] as; long b, s;
+        if ((as = counterCells) != null ||
+            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
+            CounterHashCode hc; CounterCell a; long v; int m;
+            boolean uncontended = true;
+            if ((hc = threadCounterHashCode.get()) == null ||
+                as == null || (m = as.length - 1) < 0 ||
+                (a = as[m & hc.code]) == null ||
+                !(uncontended =
+                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
+                fullAddCount(x, hc, uncontended);
+                return;
+            }
+            if (check <= 1)
+                return;
+            s = sumCount();
+        }
+        if (check >= 0) {
+            Node<V>[] tab, nt; int sc;
+            while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
+                   tab.length < MAXIMUM_CAPACITY) {
+                if (sc < 0) {
+                    if (sc == -1 || transferIndex <= transferOrigin ||
+                        (nt = nextTable) == null)
+                        break;
+                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
+                        transfer(tab, nt);
                 }
-            } finally {
-                sizeCtl = sc;
+                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
+                    transfer(tab, null);
+                s = sumCount();
             }
         }
     }
@@ -1872,18 +1949,19 @@ public class ConcurrentHashMapV8
      *
      * @param size number of elements (doesn't need to be perfectly accurate)
      */
-    private final void tryPresize(int size) {
+    @SuppressWarnings("unchecked") private final void tryPresize(int size) {
         int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
             tableSizeFor(size + (size >>> 1) + 1);
         int sc;
         while ((sc = sizeCtl) >= 0) {
-            Node[] tab = table; int n;
+            Node<V>[] tab = table; int n;
             if (tab == null || (n = tab.length) == 0) {
                 n = (sc > c) ? sc : c;
-                if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
+                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                     try {
                         if (table == tab) {
-                            table = new Node[n];
+                            @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
+                            table = (Node<V>[])tb;
                             sc = n - (n >>> 2);
                         }
                     } finally {
@@ -1893,260 +1971,270 @@ public class ConcurrentHashMapV8
             }
             else if (c <= sc || n >= MAXIMUM_CAPACITY)
                 break;
-            else if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
-                try {
-                    if (table == tab) {
-                        table = rebuild(tab);
-                        sc = (n << 1) - (n >>> 1);
-                    }
-                } finally {
-                    sizeCtl = sc;
-                }
-            }
+            else if (tab == table &&
+                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
+                transfer(tab, null);
         }
     }
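
The arithmetic for c above targets a capacity whose 0.75 load-factor threshold clears the requested size: roughly 1.5 times size, rounded up to the next power of two. A worked sketch of the computation:

    int size = 100;
    int want = size + (size >>> 1) + 1;   // 151: about 1.5x the element count
    int cap = 1;
    while (cap < want)
        cap <<= 1;                        // tableSizeFor analog: 256
    // threshold = cap - (cap >>> 2) = 192 >= 100, so no immediate resize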
 
-    /*
+    /**
      * Moves and/or copies the nodes in each bin to new table. See
      * above for explanation.
-     *
-     * @return the new table
      */
-    private static final Node[] rebuild(Node[] tab) {
-        int n = tab.length;
-        Node[] nextTab = new Node[n << 1];
-        Node fwd = new Node(MOVED, nextTab, null, null);
-        int[] buffer = null;       // holds bins to revisit; null until needed
-        Node rev = null;           // reverse forwarder; null until needed
-        int nbuffered = 0;         // the number of bins in buffer list
-        int bufferIndex = 0;       // buffer index of current buffered bin
-        int bin = n - 1;           // current non-buffered bin or -1 if none
-
-        for (int i = bin;;) {      // start upwards sweep
-            int fh; Node f;
-            if ((f = tabAt(tab, i)) == null) {
-                if (bin >= 0) {    // no lock needed (or available)
-                    if (!casTabAt(tab, i, f, fwd))
-                        continue;
-                }
-                else {             // transiently use a locked forwarding node
-                    Node g = new Node(MOVED|LOCKED, nextTab, null, null);
-                    if (!casTabAt(tab, i, f, g))
-                        continue;
+    @SuppressWarnings("unchecked") private final void transfer
+        (Node[] tab, Node[] nextTab) {
+        int n = tab.length, stride;
+        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
+            stride = MIN_TRANSFER_STRIDE; // subdivide range
+        if (nextTab == null) {            // initiating
+            try {
+                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
+                nextTab = (Node<V>[])tb;
+            } catch (Throwable ex) {      // try to cope with OOME
+                sizeCtl = Integer.MAX_VALUE;
+                return;
+            }
+            nextTable = nextTab;
+            transferOrigin = n;
+            transferIndex = n;
+            Node<V> rev = new Node<V>(MOVED, tab, null, null);
+            for (int k = n; k > 0;) {    // progressively reveal ready slots
+                int nextk = (k > stride) ? k - stride : 0;
+                for (int m = nextk; m < k; ++m)
+                    nextTab[m] = rev;
+                for (int m = n + nextk; m < n + k; ++m)
+                    nextTab[m] = rev;
+                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
+            }
+        }
+        int nextn = nextTab.length;
+        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
+        boolean advance = true;
+        for (int i = 0, bound = 0;;) {
+            int nextIndex, nextBound; Node<V> f; Object fk;
+            while (advance) {
+                if (--i >= bound)
+                    advance = false;
+                else if ((nextIndex = transferIndex) <= transferOrigin) {
+                    i = -1;
+                    advance = false;
+                }
+                else if (U.compareAndSwapInt
+                         (this, TRANSFERINDEX, nextIndex,
+                          nextBound = (nextIndex > stride ?
+                                       nextIndex - stride : 0))) {
+                    bound = nextBound;
+                    i = nextIndex - 1;
+                    advance = false;
+                }
+            }
+            if (i < 0 || i >= n || i + n >= nextn) {
+                for (int sc;;) {
+                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
+                        if (sc == -1) {
+                            nextTable = null;
+                            table = nextTab;
+                            sizeCtl = (n << 1) - (n >>> 1);
+                        }
+                        return;
+                    }
+                }
+            }
+            else if ((f = tabAt(tab, i)) == null) {
+                if (casTabAt(tab, i, null, fwd)) {
                     setTabAt(nextTab, i, null);
                     setTabAt(nextTab, i + n, null);
-                    setTabAt(tab, i, fwd);
-                    if (!g.casHash(MOVED|LOCKED, MOVED)) {
-                        g.hash = MOVED;
-                        synchronized (g) { g.notifyAll(); }
-                    }
+                    advance = true;
                 }
             }
-            else if ((fh = f.hash) == MOVED) {
-                Object fk = f.key;
-                if (fk instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
-                    boolean validated = false;
-                    t.acquire(0);
-                    try {
-                        if (tabAt(tab, i) == f) {
-                            validated = true;
-                            splitTreeBin(nextTab, i, t);
-                            setTabAt(tab, i, fwd);
+            else if (f.hash >= 0) {
+                synchronized (f) {
+                    if (tabAt(tab, i) == f) {
+                        int runBit = f.hash & n;
+                        Node<V> lastRun = f, lo = null, hi = null;
+                        for (Node<V> p = f.next; p != null; p = p.next) {
+                            int b = p.hash & n;
+                            if (b != runBit) {
+                                runBit = b;
+                                lastRun = p;
+                            }
                         }
-                    } finally {
-                        t.release(0);
+                        if (runBit == 0)
+                            lo = lastRun;
+                        else
+                            hi = lastRun;
+                        for (Node<V> p = f; p != lastRun; p = p.next) {
+                            int ph = p.hash;
+                            Object pk = p.key; V pv = p.val;
+                            if ((ph & n) == 0)
+                                lo = new Node<V>(ph, pk, pv, lo);
+                            else
+                                hi = new Node<V>(ph, pk, pv, hi);
+                        }
+                        setTabAt(nextTab, i, lo);
+                        setTabAt(nextTab, i + n, hi);
+                        setTabAt(tab, i, fwd);
+                        advance = true;
                     }
-                    if (!validated)
-                        continue;
                 }
             }
-            else if ((fh & LOCKED) == 0 && f.casHash(fh, fh|LOCKED)) {
-                boolean validated = false;
-                try {              // split to lo and hi lists; copying as needed
+            else if ((fk = f.key) instanceof TreeBin) {
+                TreeBin<V> t = (TreeBin<V>)fk;
+                t.acquire(0);
+                try {
                     if (tabAt(tab, i) == f) {
-                        validated = true;
-                        splitBin(nextTab, i, f);
+                        TreeBin<V> lt = new TreeBin<V>();
+                        TreeBin<V> ht = new TreeBin<V>();
+                        int lc = 0, hc = 0;
+                        for (Node<V> e = t.first; e != null; e = e.next) {
+                            int h = e.hash;
+                            Object k = e.key; V v = e.val;
+                            if ((h & n) == 0) {
+                                ++lc;
+                                lt.putTreeNode(h, k, v);
+                            }
+                            else {
+                                ++hc;
+                                ht.putTreeNode(h, k, v);
+                            }
+                        }
+                        Node<V> ln, hn; // throw away trees if too small
+                        if (lc < TREE_THRESHOLD) {
+                            ln = null;
+                            for (Node<V> p = lt.first; p != null; p = p.next)
+                                ln = new Node<V>(p.hash, p.key, p.val, ln);
+                        }
+                        else
+                            ln = new Node<V>(MOVED, lt, null, null);
+                        setTabAt(nextTab, i, ln);
+                        if (hc < TREE_THRESHOLD) {
+                            hn = null;
+                            for (Node<V> p = ht.first; p != null; p = p.next)
+                                hn = new Node<V>(p.hash, p.key, p.val, hn);
+                        }
+                        else
+                            hn = new Node<V>(MOVED, ht, null, null);
+                        setTabAt(nextTab, i + n, hn);
                         setTabAt(tab, i, fwd);
+                        advance = true;
                     }
                 } finally {
-                    if (!f.casHash(fh | LOCKED, fh)) {
-                        f.hash = fh;
-                        synchronized (f) { f.notifyAll(); };
-                    }
+                    t.release(0);
                 }
-                if (!validated)
-                    continue;
-            }
-            else {
-                if (buffer == null) // initialize buffer for revisits
-                    buffer = new int[TRANSFER_BUFFER_SIZE];
-                if (bin < 0 && bufferIndex > 0) {
-                    int j = buffer[--bufferIndex];
-                    buffer[bufferIndex] = i;
-                    i = j;         // swap with another bin
-                    continue;
-                }
-                if (bin < 0 || nbuffered >= TRANSFER_BUFFER_SIZE) {
-                    f.tryAwaitLock(tab, i);
-                    continue;      // no other options -- block
-                }
-                if (rev == null)   // initialize reverse-forwarder
-                    rev = new Node(MOVED, tab, null, null);
-                if (tabAt(tab, i) != f || (f.hash & LOCKED) == 0)
-                    continue;      // recheck before adding to list
-                buffer[nbuffered++] = i;
-                setTabAt(nextTab, i, rev);     // install place-holders
-                setTabAt(nextTab, i + n, rev);
-            }
-
-            if (bin > 0)
-                i = --bin;
-            else if (buffer != null && nbuffered > 0) {
-                bin = -1;
-                i = buffer[bufferIndex = --nbuffered];
             }
             else
-                return nextTab;
+                advance = true; // already processed
         }
     }
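
The lo/hi split relies on table lengths being powers of two: when doubling from n, the single new hash bit, h & n, decides whether an entry keeps its slot or moves up by exactly n. A standalone illustration:

    int n = 16;                           // old table length (power of two)
    int[] hashes = { 5, 21, 37, 53 };     // all land in slot 5 when n == 16
    for (int h : hashes) {
        int oldSlot = h & (n - 1);        // 5 for each of these
        int newSlot = ((h & n) == 0) ? oldSlot : oldSlot + n;
        System.out.println(h + ": " + oldSlot + " -> " + newSlot);   // 5 or 21
    }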
 
-    /**
-     * Split a normal bin with list headed by e into lo and hi parts;
-     * install in given table
-     */
-    private static void splitBin(Node[] nextTab, int i, Node e) {
-        int bit = nextTab.length >>> 1; // bit to split on
-        int runBit = e.hash & bit;
-        Node lastRun = e, lo = null, hi = null;
-        for (Node p = e.next; p != null; p = p.next) {
-            int b = p.hash & bit;
-            if (b != runBit) {
-                runBit = b;
-                lastRun = p;
+    /* ---------------- Counter support -------------- */
+
+    final long sumCount() {
+        CounterCell[] as = counterCells; CounterCell a;
+        long sum = baseCount;
+        if (as != null) {
+            for (int i = 0; i < as.length; ++i) {
+                if ((a = as[i]) != null)
+                    sum += a.value;
             }
         }
-        if (runBit == 0)
-            lo = lastRun;
-        else
-            hi = lastRun;
-        for (Node p = e; p != lastRun; p = p.next) {
-            int ph = p.hash & HASH_BITS;
-            Object pk = p.key, pv = p.val;
-            if ((ph & bit) == 0)
-                lo = new Node(ph, pk, pv, lo);
-            else
-                hi = new Node(ph, pk, pv, hi);
-        }
-        setTabAt(nextTab, i, lo);
-        setTabAt(nextTab, i + bit, hi);
+        return sum;
     }
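
baseCount plus counterCells form a LongAdder-style striped counter, and sumCount is its read side. A minimal, hypothetical sketch of the idea with a fixed stripe count (the real fullAddCount below also grows the cell array and rehashes on collision):

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.atomic.AtomicLongArray;

    class StripedCounter {
        final AtomicLong base = new AtomicLong();            // like baseCount
        final AtomicLongArray cells = new AtomicLongArray(8);

        void add(long x) {
            long b = base.get();
            if (!base.compareAndSet(b, b + x)) {             // contended: stripe
                int i = (int) (Thread.currentThread().getId() & 7);
                cells.addAndGet(i, x);
            }
        }

        long sum() {                                         // like sumCount()
            long s = base.get();
            for (int i = 0; i < cells.length(); ++i)
                s += cells.get(i);
            return s;
        }
    }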
 
-    /**
-     * Split a tree bin into lo and hi parts; install in given table
-     */
-    private static void splitTreeBin(Node[] nextTab, int i, TreeBin t) {
-        int bit = nextTab.length >>> 1;
-        TreeBin lt = new TreeBin();
-        TreeBin ht = new TreeBin();
-        int lc = 0, hc = 0;
-        for (Node e = t.first; e != null; e = e.next) {
-            int h = e.hash & HASH_BITS;
-            Object k = e.key, v = e.val;
-            if ((h & bit) == 0) {
-                ++lc;
-                lt.putTreeNode(h, k, v);
-            }
-            else {
-                ++hc;
-                ht.putTreeNode(h, k, v);
-            }
-        }
-        Node ln, hn; // throw away trees if too small
-        if (lc <= (TREE_THRESHOLD >>> 1)) {
-            ln = null;
-            for (Node p = lt.first; p != null; p = p.next)
-                ln = new Node(p.hash, p.key, p.val, ln);
-        }
-        else
-            ln = new Node(MOVED, lt, null, null);
-        setTabAt(nextTab, i, ln);
-        if (hc <= (TREE_THRESHOLD >>> 1)) {
-            hn = null;
-            for (Node p = ht.first; p != null; p = p.next)
-                hn = new Node(p.hash, p.key, p.val, hn);
+    // See LongAdder version for explanation
+    private final void fullAddCount(long x, CounterHashCode hc,
+                                    boolean wasUncontended) {
+        int h;
+        if (hc == null) {
+            hc = new CounterHashCode();
+            int s = counterHashCodeGenerator.addAndGet(SEED_INCREMENT);
+            h = hc.code = (s == 0) ? 1 : s; // Avoid zero
+            threadCounterHashCode.set(hc);
         }
         else
-            hn = new Node(MOVED, ht, null, null);
-        setTabAt(nextTab, i + bit, hn);
-    }
-
-    /**
-     * Implementation for clear. Steps through each bin, removing all
-     * nodes.
-     */
-    private final void internalClear() {
-        long delta = 0L; // negative number of deletions
-        int i = 0;
-        Node[] tab = table;
-        while (tab != null && i < tab.length) {
-            int fh; Object fk;
-            Node f = tabAt(tab, i);
-            if (f == null)
-                ++i;
-            else if ((fh = f.hash) == MOVED) {
-                if ((fk = f.key) instanceof TreeBin) {
-                    TreeBin t = (TreeBin)fk;
-                    t.acquire(0);
-                    try {
-                        if (tabAt(tab, i) == f) {
-                            for (Node p = t.first; p != null; p = p.next) {
-                                p.val = null;
-                                --delta;
+            h = hc.code;
+        boolean collide = false;                // True if last slot nonempty
+        for (;;) {
+            CounterCell[] as; CounterCell a; int n; long v;
+            if ((as = counterCells) != null && (n = as.length) > 0) {
+                if ((a = as[(n - 1) & h]) == null) {
+                    if (counterBusy == 0) {            // Try to attach new Cell
+                        CounterCell r = new CounterCell(x); // Optimistic create
+                        if (counterBusy == 0 &&
+                            U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
+                            boolean created = false;
+                            try {               // Recheck under lock
+                                CounterCell[] rs; int m, j;
+                                if ((rs = counterCells) != null &&
+                                    (m = rs.length) > 0 &&
+                                    rs[j = (m - 1) & h] == null) {
+                                    rs[j] = r;
+                                    created = true;
+                                }
+                            } finally {
+                                counterBusy = 0;
                             }
-                            t.first = null;
-                            t.root = null;
-                            ++i;
+                            if (created)
+                                break;
+                            continue;           // Slot is now non-empty
                         }
-                    } finally {
-                        t.release(0);
                     }
+                    collide = false;
                 }
-                else
-                    tab = (Node[])fk;
-            }
-            else if ((fh & LOCKED) != 0) {
-                counter.add(delta); // opportunistically update count
-                delta = 0L;
-                f.tryAwaitLock(tab, i);
-            }
-            else if (f.casHash(fh, fh | LOCKED)) {
-                try {
-                    if (tabAt(tab, i) == f) {
-                        for (Node e = f; e != null; e = e.next) {
-                            e.val = null;
-                            --delta;
+                else if (!wasUncontended)       // CAS already known to fail
+                    wasUncontended = true;      // Continue after rehash
+                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
+                    break;
+                else if (counterCells != as || n >= NCPU)
+                    collide = false;            // At max size or stale
+                else if (!collide)
+                    collide = true;
+                else if (counterBusy == 0 &&
+                         U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
+                    try {
+                        if (counterCells == as) {// Expand table unless stale
+                            CounterCell[] rs = new CounterCell[n << 1];
+                            for (int i = 0; i < n; ++i)
+                                rs[i] = as[i];
+                            counterCells = rs;
                         }
-                        setTabAt(tab, i, null);
-                        ++i;
+                    } finally {
+                        counterBusy = 0;
                     }
-                } finally {
-                    if (!f.casHash(fh | LOCKED, fh)) {
-                        f.hash = fh;
-                        synchronized (f) { f.notifyAll(); };
+                    collide = false;
+                    continue;                   // Retry with expanded table
+                }
+                h ^= h << 13;                   // Rehash
+                h ^= h >>> 17;
+                h ^= h << 5;
+            }
+            else if (counterBusy == 0 && counterCells == as &&
+                     U.compareAndSwapInt(this, COUNTERBUSY, 0, 1)) {
+                boolean init = false;
+                try {                           // Initialize table
+                    if (counterCells == as) {
+                        CounterCell[] rs = new CounterCell[2];
+                        rs[h & 1] = new CounterCell(x);
+                        counterCells = rs;
+                        init = true;
                     }
+                } finally {
+                    counterBusy = 0;
                 }
+                if (init)
+                    break;
             }
+            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
+                break;                          // Fall back on using base
         }
-        if (delta != 0)
-            counter.add(delta);
+        hc.code = h;                            // Record index for next time
     }
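
The fullAddCount/sumCount pair above implements LongAdder-style striping via Unsafe CAS. The same idea in miniature, using only java.util.concurrent.atomic types (StripedCounter is an illustrative name, a fixed cell array stands in for the lazily grown counterCells, and ThreadLocalRandom assumes Java 7):

    import java.util.concurrent.ThreadLocalRandom;
    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.atomic.AtomicLongArray;

    class StripedCounter {
        private final AtomicLong base = new AtomicLong();
        private final AtomicLongArray cells =
            new AtomicLongArray(Runtime.getRuntime().availableProcessors());

        void add(long x) {
            // Fast path: CAS the base; on contention, spread across cells.
            long b = base.get();
            if (!base.compareAndSet(b, b + x)) {
                int i = ThreadLocalRandom.current().nextInt(cells.length());
                cells.addAndGet(i, x);
            }
        }

        long sum() {
            // Weakly consistent, like sumCount() above: base plus all cells.
            long s = base.get();
            for (int i = 0; i < cells.length(); ++i)
                s += cells.get(i);
            return s;
        }
    }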
 
     /* ----------------Table Traversal -------------- */
 
     /**
      * Encapsulates traversal for methods such as containsValue; also
-     * serves as a base class for other iterators.
+     * serves as a base class for other iterators and bulk tasks.
      *
      * At each step, the iterator snapshots the key ("nextKey") and
      * value ("nextVal") of a valid node (i.e., one that, at point of
@@ -2154,7 +2242,8 @@ public class ConcurrentHashMapV8
      * change (including to null, indicating deletion), field nextVal
      * might not be accurate at point of use, but still maintains the
      * weak consistency property of holding a value that was once
-     * valid.
+     * valid. To support iterator.remove, the nextKey field is not
+     * updated (nulled out) when the iterator cannot advance.
      *
      * Internal traversals directly access these fields, as in:
      * {@code while (it.advance() != null) { process(it.nextKey); }}
@@ -2180,56 +2269,76 @@ public class ConcurrentHashMapV8
      * paranoically cope with potential sharing by users of iterators
     * across threads, iteration terminates if a bounds check fails
      * for a table read.
-     */
-    static class InternalIterator {
-        final ConcurrentHashMapV8 map;
-        Node next;           // the next entry to use
-        Node last;           // the last entry used
+     *
+     * This class extends CountedCompleter to streamline parallel
+     * iteration in bulk operations. This adds only a few fields of
+     * space overhead, small enough not to matter in the cases where
+     * it is not needed.  Because CountedCompleter is
+     * Serializable, but iterators need not be, we need to add warning
+     * suppressions.
+     */
+    @SuppressWarnings("serial") static class Traverser<K,V,R>
+        extends CountedCompleter<R> {
+        final ConcurrentHashMapV8<K, V> map;
+        Node next;        // the next entry to use
         Object nextKey;      // cached key field of next
-        Object nextVal;      // cached val field of next
-        Node[] tab;          // current table; updated if resized
+        V nextVal;           // cached val field of next
+        Node[] tab;       // current table; updated if resized
         int index;           // index of bin to use next
         int baseIndex;       // current index of initial table
         int baseLimit;       // index bound for initial table
-        final int baseSize;  // initial table size
+        int baseSize;        // initial table size
+        int batch;           // split control
 
         /** Creates iterator for all entries in the table. */
-        InternalIterator(ConcurrentHashMapV8 map) {
-            this.tab = (this.map = map).table;
-            baseLimit = baseSize = (tab == null) ? 0 : tab.length;
+        Traverser(ConcurrentHashMapV8<K, V> map) {
+            this.map = map;
         }
 
-        /** Creates iterator for clone() and split() methods */
-        InternalIterator(InternalIterator it, boolean split) {
-            this.map = it.map;
-            this.tab = it.tab;
-            this.baseSize = it.baseSize;
-            int lo = it.baseIndex;
-            int hi = this.baseLimit = it.baseLimit;
-            this.index = this.baseIndex =
-                (split) ? (it.baseLimit = (lo + hi + 1) >>> 1) : lo;
+        /** Creates iterator for split() methods and task constructors */
+        Traverser(ConcurrentHashMapV8<K, V> map, Traverser<K,V,?> it, int batch) {
+            super(it);
+            this.batch = batch;
+            if ((this.map = map) != null && it != null) { // split parent
+                Node[] t;
+                if ((t = it.tab) == null &&
+                    (t = it.tab = map.table) != null)
+                    it.baseLimit = it.baseSize = t.length;
+                this.tab = t;
+                this.baseSize = it.baseSize;
+                int hi = this.baseLimit = it.baseLimit;
+                it.baseLimit = this.index = this.baseIndex =
+                    (hi + it.baseIndex + 1) >>> 1;
+            }
         }
 
         /**
-         * Advances next; returns nextVal or null if terminated
+         * Advances next; returns nextVal or null if terminated.
          * See above for explanation.
          */
-        final Object advance() {
-            Node e = last = next;
-            Object ev = null;
+        @SuppressWarnings("unchecked") final V advance() {
+            Node e = next;
+            V ev = null;
             outer: do {
                 if (e != null)                  // advance past used/skipped node
                     e = e.next;
                 while (e == null) {             // get to next non-null bin
-                    Node[] t; int b, i, n; Object ek; // checks must use locals
-                    if ((b = baseIndex) >= baseLimit || (i = index) < 0 ||
-                        (t = tab) == null || i >= (n = t.length))
+                    ConcurrentHashMapV8<K, V> m;
+                    Node[] t; int b, i, n; Object ek; //  must use locals
+                    if ((t = tab) != null)
+                        n = t.length;
+                    else if ((m = map) != null && (t = tab = m.table) != null)
+                        n = baseLimit = baseSize = t.length;
+                    else
                         break outer;
-                    else if ((e = tabAt(t, i)) != null && e.hash == MOVED) {
+                    if ((b = baseIndex) >= baseLimit ||
+                        (i = index) < 0 || i >= n)
+                        break outer;
+                    if ((e = tabAt(t, i)) != null && e.hash < 0) {
                         if ((ek = e.key) instanceof TreeBin)
-                            e = ((TreeBin)ek).first;
+                            e = ((TreeBin)ek).first;
                         else {
-                            tab = (Node[])ek;
+                            tab = (Node[])ek;
                             continue;           // restarts due to null val
                         }
                     }                           // visit upper slots if present
@@ -2242,13 +2351,10 @@ public class ConcurrentHashMapV8
         }
 
         public final void remove() {
-            if (nextVal == null)
-                advance();
-            Node e = last;
-            if (e == null)
+            Object k = nextKey;
+            if (k == null && (advance() == null || (k = nextKey) == null))
                 throw new IllegalStateException();
-            last = null;
-            map.remove(e.key);
+            map.internalReplace(k, null, null);
         }
 
         public final boolean hasNext() {
@@ -2256,15 +2362,47 @@ public class ConcurrentHashMapV8
         }
 
         public final boolean hasMoreElements() { return hasNext(); }
+
+        public void compute() { } // default no-op CountedCompleter body
+
+        /**
+         * Returns a batch value > 0 if this task should (and must) be
+         * split, if so, adding to pending count, and in any case
+     * updating batch value. The initial batch value is approximately
+     * 2^k, where k is one less than the number of times to split the
+     * task in two before executing the leaf action. This value is
+     * faster to compute and more convenient to use as a guide to
+     * splitting than is the depth, since it is used while dividing by
+     * two anyway.
+         */
+        final int preSplit() {
+            ConcurrentHashMapV8<K, V> m; int b; Node[] t; ForkJoinPool pool;
+            if ((b = batch) < 0 && (m = map) != null) { // force initialization
+                if ((t = tab) == null && (t = tab = m.table) != null)
+                    baseLimit = baseSize = t.length;
+                if (t != null) {
+                    long n = m.sumCount();
+                    int par = ((pool = getPool()) == null) ?
+                        ForkJoinPool.getCommonPoolParallelism() :
+                        pool.getParallelism();
+                    int sp = par << 3; // slack of 8
+                    b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
+                }
+            }
+            b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
+            if ((batch = b) > 0)
+                addToPendingCount(1);
+            return b;
+        }
+
     }
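
At the API level, the weak-consistency contract that Traverser provides looks like this; a hedged sketch assuming a map shared with concurrently updating threads:

    import java.util.Map;
    import jsr166e.ConcurrentHashMapV8;

    class WeaklyConsistentScan {
        // Counts values >= floor while other threads may be mutating the map.
        // Never throws ConcurrentModificationException; each entry reflects
        // some state at or since the creation of the iterator.
        static long countAtLeast(ConcurrentHashMapV8<String, Integer> map,
                                 int floor) {
            long n = 0;
            for (Map.Entry<String, Integer> e : map.entrySet()) {
                if (e.getValue() >= floor)
                    ++n;
            }
            return n;
        }
    }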
 
     /* ---------------- Public operations -------------- */
 
     /**
-     * Creates a new, empty map with the default initial table size (16),
+     * Creates a new, empty map with the default initial table size (16).
      */
     public ConcurrentHashMapV8() {
-        this.counter = new LongAdder();
     }
 
     /**
@@ -2283,7 +2421,6 @@ public class ConcurrentHashMapV8
         int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
                    MAXIMUM_CAPACITY :
                    tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
-        this.counter = new LongAdder();
         this.sizeCtl = cap;
     }
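
Worked through for initialCapacity = 16, the sizing above gives 16 + (16 >>> 1) + 1 = 25, which tableSizeFor rounds up to the next power of two, 32. A sketch (Integer.highestOneBit((c - 1) << 1) is an equivalent of tableSizeFor for 1 <= c <= 2^30):

    class SizingExample {
        public static void main(String[] args) {
            int initialCapacity = 16;
            int c = initialCapacity + (initialCapacity >>> 1) + 1; // 25
            int cap = Integer.highestOneBit((c - 1) << 1);         // 32
            System.out.println(c + " -> " + cap);
        }
    }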
 
@@ -2293,7 +2430,6 @@ public class ConcurrentHashMapV8
      * @param m the map
      */
     public ConcurrentHashMapV8(Map<? extends K, ? extends V> m) {
-        this.counter = new LongAdder();
         this.sizeCtl = DEFAULT_CAPACITY;
         internalPutAll(m);
     }
@@ -2342,32 +2478,66 @@ public class ConcurrentHashMapV8
         if (initialCapacity < concurrencyLevel)   // Use at least as many bins
             initialCapacity = concurrencyLevel;   // as estimated threads
         long size = (long)(1.0 + (long)initialCapacity / loadFactor);
-        int cap = ((size >= (long)MAXIMUM_CAPACITY) ?
-                   MAXIMUM_CAPACITY: tableSizeFor((int)size));
-        this.counter = new LongAdder();
+        int cap = (size >= (long)MAXIMUM_CAPACITY) ?
+            MAXIMUM_CAPACITY : tableSizeFor((int)size);
         this.sizeCtl = cap;
     }
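
The legacy constructor's arithmetic, worked for the Hashtable-style defaults (16, 0.75f, 16): 1.0 + 16/0.75 truncates to 22, and tableSizeFor(22) yields 32:

    class LegacySizingExample {
        public static void main(String[] args) {
            long size = (long)(1.0 + (long)16 / 0.75f); // 22
            System.out.println(size);                   // tableSizeFor(22) == 32
        }
    }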
 
     /**
+     * Creates a new {@link Set} backed by a ConcurrentHashMapV8
+     * from the given type to {@code Boolean.TRUE}.
+     *
+     * @return the new set
+     */
+    public static <K> KeySetView<K,Boolean> newKeySet() {
+        return new KeySetView<K,Boolean>(new ConcurrentHashMapV8<K,Boolean>(),
+                                         Boolean.TRUE);
+    }
+
+    /**
+     * Creates a new {@link Set} backed by a ConcurrentHashMapV8
+     * from the given type to {@code Boolean.TRUE}.
+     *
+     * @param initialCapacity The implementation performs internal
+     * sizing to accommodate this many elements.
+     * @throws IllegalArgumentException if the initial capacity of
+     * elements is negative
+     * @return the new set
+     */
+    public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
+        return new KeySetView<K,Boolean>
+            (new ConcurrentHashMapV8<K,Boolean>(initialCapacity), Boolean.TRUE);
+    }
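+
A usage sketch for the set views just defined (the generic parameters, elided by formatting in this copy, are KeySetView<K,Boolean> in the source):

    import jsr166e.ConcurrentHashMapV8;
    import jsr166e.ConcurrentHashMapV8.KeySetView;

    class KeySetDemo {
        public static void main(String[] args) {
            // A concurrent set of strings, backed by a map to Boolean.TRUE.
            KeySetView<String, Boolean> seen = ConcurrentHashMapV8.newKeySet();
            System.out.println(seen.add("alpha")); // true: newly added
            System.out.println(seen.add("alpha")); // false: already present
        }
    }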
+
+    /**
      * {@inheritDoc}
      */
     public boolean isEmpty() {
-        return counter.sum() <= 0L; // ignore transient negative values
+        return sumCount() <= 0L; // ignore transient negative values
     }
 
     /**
      * {@inheritDoc}
      */
     public int size() {
-        long n = counter.sum();
+        long n = sumCount();
         return ((n < 0L) ? 0 :
                 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
                 (int)n);
     }
 
-    final long longSize() { // accurate version of size needed for views
-        long n = counter.sum();
-        return (n < 0L) ? 0L : n;
+    /**
+     * Returns the number of mappings. This method should be used
+     * instead of {@link #size} because a ConcurrentHashMapV8 may
+     * contain more mappings than can be represented as an int. The
+     * value returned is an estimate; the actual count may differ if
+     * there are concurrent insertions or removals.
+     *
+     * @return the number of mappings
+     */
+    public long mappingCount() {
+        long n = sumCount();
+        return (n < 0L) ? 0L : n; // ignore transient negative values
     }
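
The distinction between the two count methods, in a short sketch:

    import jsr166e.ConcurrentHashMapV8;

    class CountExample {
        static void report(ConcurrentHashMapV8<Long, Long> m) {
            long estimate = m.mappingCount(); // full long range; still an estimate
            int clamped = m.size();           // saturates at Integer.MAX_VALUE
            System.out.println(estimate + " ~ " + clamped);
        }
    }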
 
     /**
@@ -2381,11 +2551,23 @@ public class ConcurrentHashMapV8
      *
      * @throws NullPointerException if the specified key is null
      */
-    @SuppressWarnings("unchecked")
     public V get(Object key) {
-        if (key == null)
-            throw new NullPointerException();
-        return (V)internalGet(key);
+        return internalGet(key);
+    }
+
+    /**
+     * Returns the value to which the specified key is mapped,
+     * or the given defaultValue if this map contains no mapping for the key.
+     *
+     * @param key the key
+     * @param defaultValue the value to return if this map contains
+     * no mapping for the given key
+     * @return the mapping for the key, if present; else the defaultValue
+     * @throws NullPointerException if the specified key is null
+     */
+    public V getValueOrDefault(Object key, V defaultValue) {
+        V v;
+        return (v = internalGet(key)) == null ? defaultValue : v;
     }
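
A one-line usage sketch; the single internalGet avoids the separate containsKey check a caller would otherwise need:

    import jsr166e.ConcurrentHashMapV8;

    class DefaultLookup {
        static int score(ConcurrentHashMapV8<String, Integer> scores,
                         String player) {
            return scores.getValueOrDefault(player, 0); // 0 if absent
        }
    }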
 
     /**
@@ -2398,8 +2580,6 @@ public class ConcurrentHashMapV8
      * @throws NullPointerException if the specified key is null
      */
     public boolean containsKey(Object key) {
-        if (key == null)
-            throw new NullPointerException();
         return internalGet(key) != null;
     }
 
@@ -2416,8 +2596,8 @@ public class ConcurrentHashMapV8
     public boolean containsValue(Object value) {
         if (value == null)
             throw new NullPointerException();
-        Object v;
-        InternalIterator it = new InternalIterator(this);
+        V v;
+        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
         while ((v = it.advance()) != null) {
             if (v == value || value.equals(v))
                 return true;
@@ -2440,7 +2620,7 @@ public class ConcurrentHashMapV8
      *         {@code false} otherwise
      * @throws NullPointerException if the specified value is null
      */
-    public boolean contains(Object value) {
+    @Deprecated public boolean contains(Object value) {
         return containsValue(value);
     }
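
Ahead of the put/compute family below, a hedged usage sketch written against the jsr166e function interfaces (Fun, BiFun) declared near the end of this file; merge gives the atomic create-or-combine word-count idiom, and computeIfAbsent gives at-most-once initialization per key:

    import jsr166e.ConcurrentHashMapV8;
    import jsr166e.ConcurrentHashMapV8.BiFun;
    import jsr166e.ConcurrentHashMapV8.Fun;

    class UpdateFamilyDemo {
        static void demo(ConcurrentHashMapV8<String, Integer> counts) {
            // merge: atomically create-or-combine (word-count idiom).
            counts.merge("word", 1, new BiFun<Integer, Integer, Integer>() {
                public Integer apply(Integer old, Integer inc) {
                    return old + inc;
                }
            });
            // computeIfAbsent: compute and install only if no mapping exists.
            counts.computeIfAbsent("key", new Fun<String, Integer>() {
                public Integer apply(String k) { return k.length(); }
            });
        }
    }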
 
@@ -2448,7 +2628,7 @@ public class ConcurrentHashMapV8
      * Maps the specified key to the specified value in this table.
      * Neither the key nor the value can be null.
      *
-     * <p> The value can be retrieved by calling the {@code get} method
+     * <p>The value can be retrieved by calling the {@code get} method
      * with a key that is equal to the original key.
      *
      * @param key key with which the specified value is to be associated
@@ -2457,11 +2637,8 @@ public class ConcurrentHashMapV8
      * {@code null} if there was no mapping for {@code key}
      * @throws NullPointerException if the specified key or value is null
      */
-    @SuppressWarnings("unchecked")
     public V put(K key, V value) {
-        if (key == null || value == null)
-            throw new NullPointerException();
-        return (V)internalPut(key, value);
+        return internalPut(key, value, false);
     }
 
     /**
@@ -2471,11 +2648,8 @@ public class ConcurrentHashMapV8
      * or {@code null} if there was no mapping for the key
      * @throws NullPointerException if the specified key or value is null
      */
-    @SuppressWarnings("unchecked")
     public V putIfAbsent(K key, V value) {
-        if (key == null || value == null)
-            throw new NullPointerException();
-        return (V)internalPutIfAbsent(key, value);
+        return internalPut(key, value, true);
     }
 
     /**
@@ -2496,7 +2670,7 @@ public class ConcurrentHashMapV8
      *
      *  <pre> {@code
      * if (map.containsKey(key))
      *   return map.get(key);
-     * value = mappingFunction.map(key);
+     * value = mappingFunction.apply(key);
      * if (value != null)
      *   map.put(key, value);
      * return value;}</pre>
@@ -2513,13 +2687,13 @@ public class ConcurrentHashMapV8
      * memoized result, as in:
      *
      * <pre> {@code
-     * map.computeIfAbsent(key, new MappingFunction<K, V>() {
+     * map.computeIfAbsent(key, new Fun<K, V>() {
      *   public V map(K k) { return new Value(f(k)); }});}</pre>
      *
      * @param key key with which the specified value is to be associated
      * @param mappingFunction the function to compute a value
      * @return the current (existing or computed) value associated with
-     *         the specified key, or null if the computed value is null.
+     *         the specified key, or null if the computed value is null
      * @throws NullPointerException if the specified key or mappingFunction
      *         is null
      * @throws IllegalStateException if the computation detectably
      *         attempts a recursive update to this map that would
      *         otherwise never complete
      * @throws RuntimeException or Error if the mappingFunction does so,
      *         in which case the mapping is left unestablished
      */
-    @SuppressWarnings("unchecked")
-    public V computeIfAbsent(K key, MappingFunction<? super K, ? extends V> mappingFunction) {
-        if (key == null || mappingFunction == null)
-            throw new NullPointerException();
-        return (V)internalComputeIfAbsent(key, mappingFunction);
+    public V computeIfAbsent
+        (K key, Fun<? super K, ? extends V> mappingFunction) {
+        return internalComputeIfAbsent(key, mappingFunction);
+    }
+
+    /**
+     * If the given key is present, computes a new mapping value given a key and
+     * its current mapped value. This is equivalent to
+     * <pre> {@code
+     *   if (map.containsKey(key)) {
+     *     value = remappingFunction.apply(key, map.get(key));
+     *     if (value != null)
+     *       map.put(key, value);
+     *     else
+     *       map.remove(key);
+     *   }
+     * }</pre>
+     *
+     * except that the action is performed atomically. If the
+     * function returns {@code null}, the mapping is removed. If the
+     * function itself throws an (unchecked) exception, the exception
+     * is rethrown to its caller, and the current mapping is left
+     * unchanged. Some attempted update operations on this map by
+     * other threads may be blocked while computation is in progress,
+     * so the computation should be short and simple, and must not
+     * attempt to update any other mappings of this Map. For example,
+     * to either create or append new messages to a value mapping:
+     *
+     * @param key key with which the specified value is to be associated
+     * @param remappingFunction the function to compute a value
+     * @return the new value associated with the specified key, or null if none
+     * @throws NullPointerException if the specified key or remappingFunction
+     *         is null
+     * @throws IllegalStateException if the computation detectably
+     *         attempts a recursive update to this map that would
+     *         otherwise never complete
+     * @throws RuntimeException or Error if the remappingFunction does so,
+     *         in which case the mapping is unchanged
+     */
+    public V computeIfPresent
+        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
+        return internalCompute(key, true, remappingFunction);
     }
 
     /**
@@ -2540,7 +2751,7 @@ public class ConcurrentHashMapV8
      * its current mapped value (or {@code null} if there is no current
      * mapping). This is equivalent to
      *  <pre> {@code
-     *   value = remappingFunction.remap(key, map.get(key));
+     *   value = remappingFunction.apply(key, map.get(key));
      *   if (value != null)
      *     map.put(key, value);
      *   else
@@ -2560,14 +2771,13 @@ public class ConcurrentHashMapV8
      * <pre> {@code
      * Map<Key, String> map = ...;
      * final String msg = ...;
-     * map.compute(key, new RemappingFunction<Key, String>() {
-     *   public String remap(Key k, String v) {
+     * map.compute(key, new BiFun<Key, String, String>() {
+     *   public String apply(Key k, String v) {
      *    return (v == null) ? msg : v + msg;});}}</pre>
      *
      * @param key key with which the specified value is to be associated
      * @param remappingFunction the function to compute a value
-     * @return the new value associated with
-     *         the specified key, or null if none.
+     * @return the new value associated with the specified key, or null if none
      * @throws NullPointerException if the specified key or remappingFunction
      *         is null
      * @throws IllegalStateException if the computation detectably
      *         attempts a recursive update to this map that would
      *         otherwise never complete
      * @throws RuntimeException or Error if the remappingFunction does so,
      *         in which case the mapping is unchanged
      */
-    @SuppressWarnings("unchecked")
-    public V compute(K key, RemappingFunction<? super K, V> remappingFunction) {
-        if (key == null || remappingFunction == null)
-            throw new NullPointerException();
-        return (V)internalCompute(key, remappingFunction);
+    public V compute
+        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
+        return internalCompute(key, false, remappingFunction);
+    }
+
+    /**
+     * If the specified key is not already associated
+     * with a value, associate it with the given value.
+     * Otherwise, replace the value with the results of
+     * the given remapping function. This is equivalent to:
+     * <pre> {@code
+     *   if (!map.containsKey(key))
+     *     map.put(key, value);
+     *   else {
+     *     newValue = remappingFunction.apply(map.get(key), value);
+     *     if (newValue != null)
+     *       map.put(key, newValue);
+     *     else
+     *       map.remove(key);
+     *   }
+     * }</pre>
+     * except that the action is performed atomically. If the
+     * function returns {@code null}, the mapping is removed. If the
+     * function itself throws an (unchecked) exception, the exception
+     * is rethrown to its caller, and the current mapping is left
+     * unchanged. Some attempted update operations on this map by
+     * other threads may be blocked while computation is in progress,
+     * so the computation should be short and simple, and must not
+     * attempt to update any other mappings of this Map.
+     */
+    public V merge
+        (K key, V value,
+         BiFun<? super V, ? super V, ? extends V> remappingFunction) {
+        return internalMerge(key, value, remappingFunction);
     }
 
     /**
@@ -2592,11 +2831,8 @@ public class ConcurrentHashMapV8
      * {@code null} if there was no mapping for {@code key}
      * @throws NullPointerException if the specified key is null
      */
-    @SuppressWarnings("unchecked")
     public V remove(Object key) {
-        if (key == null)
-            throw new NullPointerException();
-        return (V)internalReplace(key, null, null);
+        return internalReplace(key, null, null);
     }
 
     /**
@@ -2605,11 +2841,7 @@ public class ConcurrentHashMapV8
      * @throws NullPointerException if the specified key is null
      */
     public boolean remove(Object key, Object value) {
-        if (key == null)
-            throw new NullPointerException();
-        if (value == null)
-            return false;
-        return internalReplace(key, null, value) != null;
+        return value != null && internalReplace(key, null, value) != null;
     }
 
     /**
@@ -2630,11 +2862,10 @@ public class ConcurrentHashMapV8
      *         or {@code null} if there was no mapping for the key
      * @throws NullPointerException if the specified key or value is null
      */
-    @SuppressWarnings("unchecked")
     public V replace(K key, V value) {
         if (key == null || value == null)
             throw new NullPointerException();
-        return (V)internalReplace(key, value, null);
+        return internalReplace(key, value, null);
     }
 
     /**
@@ -2647,43 +2878,40 @@ public class ConcurrentHashMapV8
 
     /**
      * Returns a {@link Set} view of the keys contained in this map.
      * The set is backed by the map, so changes to the map are
-     * reflected in the set, and vice-versa. The set supports element
-     * removal, which removes the corresponding mapping from this map,
-     * via the {@code Iterator.remove}, {@code Set.remove},
-     * {@code removeAll}, {@code retainAll}, and {@code clear}
-     * operations. It does not support the {@code add} or
-     * {@code addAll} operations.
+     * reflected in the set, and vice-versa.
      *
-     * <p>The view's {@code iterator} is a "weakly consistent" iterator
-     * that will never throw {@link ConcurrentModificationException},
-     * and guarantees to traverse elements as they existed upon
-     * construction of the iterator, and may (but is not guaranteed to)
-     * reflect any modifications subsequent to construction.
+     * @return the set view
+     */
+    public KeySetView<K,V> keySet() {
+        KeySetView<K,V> ks = keySet;
+        return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
+    }
+
+    /**
+     * Returns a {@link Set} view of the keys in this map, using the
+     * given common mapped value for any additions (i.e., {@link
+     * Collection#add} and {@link Collection#addAll}). This is of
+     * course only appropriate if it is acceptable to use the same
+     * value for all additions from this view.
+     *
+     * @param mappedValue the mapped value to use for any additions
+     * @return the set view
+     * @throws NullPointerException if the mappedValue is null
      */
-    public Set<K> keySet() {
-        KeySet<K,V> ks = keySet;
-        return (ks != null) ? ks : (keySet = new KeySet<K,V>(this));
+    public KeySetView<K,V> keySet(V mappedValue) {
+        if (mappedValue == null)
+            throw new NullPointerException();
+        return new KeySetView<K,V>(this, mappedValue);
     }
 
     /**
      * Returns a {@link Collection} view of the values contained in this map.
      * The collection is backed by the map, so changes to the map are
-     * reflected in the collection, and vice-versa. The collection
-     * supports element removal, which removes the corresponding
-     * mapping from this map, via the {@code Iterator.remove},
-     * {@code Collection.remove}, {@code removeAll},
-     * {@code retainAll}, and {@code clear} operations. It does not
-     * support the {@code add} or {@code addAll} operations.
-     *
-     * <p>The view's {@code iterator} is a "weakly consistent" iterator
-     * that will never throw {@link ConcurrentModificationException},
-     * and guarantees to traverse elements as they existed upon
-     * construction of the iterator, and may (but is not guaranteed to)
-     * reflect any modifications subsequent to construction.
+     * reflected in the collection, and vice-versa.
      */
-    public Collection<V> values() {
-        Values<K,V> vs = values;
-        return (vs != null) ? vs : (values = new Values<K,V>(this));
+    public ValuesView<K,V> values() {
+        ValuesView<K,V> vs = values;
+        return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
    }
 
     /**
@@ -2703,8 +2931,8 @@ public class ConcurrentHashMapV8
      * reflect any modifications subsequent to construction.
      */
     public Set<Map.Entry<K,V>> entrySet() {
-        EntrySet<K,V> es = entrySet;
-        return (es != null) ? es : (entrySet = new EntrySet<K,V>(this));
+        EntrySetView<K,V> es = entrySet;
+        return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
     }
 
     /**
@@ -2728,27 +2956,27 @@ public class ConcurrentHashMapV8
     }
 
     /**
-     * Returns a partionable iterator of the keys in this map.
+     * Returns a partitionable iterator of the keys in this map.
      *
-     * @return a partionable iterator of the keys in this map
+     * @return a partitionable iterator of the keys in this map
      */
     public Spliterator<K> keySpliterator() {
         return new KeyIterator<K,V>(this);
     }
 
     /**
-     * Returns a partionable iterator of the values in this map.
+     * Returns a partitionable iterator of the values in this map.
      *
-     * @return a partionable iterator of the values in this map
+     * @return a partitionable iterator of the values in this map
      */
     public Spliterator<V> valueSpliterator() {
         return new ValueIterator<K,V>(this);
     }
 
     /**
-     * Returns a partionable iterator of the entries in this map.
+     * Returns a partitionable iterator of the entries in this map.
* - * @return a partionable iterator of the entries in this map + * @return a partitionable iterator of the entries in this map */ public Spliterator> entrySpliterator() { return new EntryIterator(this); @@ -2763,8 +2991,8 @@ public class ConcurrentHashMapV8 */ public int hashCode() { int h = 0; - InternalIterator it = new InternalIterator(this); - Object v; + Traverser it = new Traverser(this); + V v; while ((v = it.advance()) != null) { h += it.nextKey.hashCode() ^ v.hashCode(); } @@ -2783,10 +3011,10 @@ public class ConcurrentHashMapV8 * @return a string representation of this map */ public String toString() { - InternalIterator it = new InternalIterator(this); + Traverser it = new Traverser(this); StringBuilder sb = new StringBuilder(); sb.append('{'); - Object v; + V v; if ((v = it.advance()) != null) { for (;;) { Object k = it.nextKey; @@ -2816,8 +3044,8 @@ public class ConcurrentHashMapV8 if (!(o instanceof Map)) return false; Map m = (Map) o; - InternalIterator it = new InternalIterator(this); - Object val; + Traverser it = new Traverser(this); + V val; while ((val = it.advance()) != null) { Object v = m.get(it.nextKey); if (v == null || (v != val && !v.equals(val))) @@ -2837,25 +3065,19 @@ public class ConcurrentHashMapV8 /* ----------------Iterators -------------- */ - static final class KeyIterator extends InternalIterator + @SuppressWarnings("serial") static final class KeyIterator + extends Traverser implements Spliterator, Enumeration { - KeyIterator(ConcurrentHashMapV8 map) { super(map); } - KeyIterator(InternalIterator it, boolean split) { - super(it, split); + KeyIterator(ConcurrentHashMapV8 map) { super(map); } + KeyIterator(ConcurrentHashMapV8 map, Traverser it) { + super(map, it, -1); } public KeyIterator split() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new KeyIterator(this, true); - } - public KeyIterator clone() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new KeyIterator(this, false); + return new KeyIterator(map, this); } - - @SuppressWarnings("unchecked") - public final K next() { + @SuppressWarnings("unchecked") public final K next() { if (nextVal == null && advance() == null) throw new NoSuchElementException(); Object k = nextKey; @@ -2866,72 +3088,61 @@ public class ConcurrentHashMapV8 public final K nextElement() { return next(); } } - static final class ValueIterator extends InternalIterator + @SuppressWarnings("serial") static final class ValueIterator + extends Traverser implements Spliterator, Enumeration { - ValueIterator(ConcurrentHashMapV8 map) { super(map); } - ValueIterator(InternalIterator it, boolean split) { - super(it, split); + ValueIterator(ConcurrentHashMapV8 map) { super(map); } + ValueIterator(ConcurrentHashMapV8 map, Traverser it) { + super(map, it, -1); } public ValueIterator split() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new ValueIterator(this, true); - } - - public ValueIterator clone() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new ValueIterator(this, false); + return new ValueIterator(map, this); } - @SuppressWarnings("unchecked") public final V next() { - Object v; + V v; if ((v = nextVal) == null && (v = advance()) == null) throw new NoSuchElementException(); nextVal = null; - return (V) v; + return v; } public final V nextElement() { return 
next(); } } - static final class EntryIterator extends InternalIterator + @SuppressWarnings("serial") static final class EntryIterator + extends Traverser implements Spliterator> { - EntryIterator(ConcurrentHashMapV8 map) { super(map); } - EntryIterator(InternalIterator it, boolean split) { - super(it, split); + EntryIterator(ConcurrentHashMapV8 map) { super(map); } + EntryIterator(ConcurrentHashMapV8 map, Traverser it) { + super(map, it, -1); } public EntryIterator split() { - if (last != null || (next != null && nextVal == null)) - throw new IllegalStateException(); - return new EntryIterator(this, true); - } - public EntryIterator clone() { - if (last != null || (next != null && nextVal == null)) + if (nextKey != null) throw new IllegalStateException(); - return new EntryIterator(this, false); + return new EntryIterator(map, this); } - @SuppressWarnings("unchecked") - public final Map.Entry next() { - Object v; + @SuppressWarnings("unchecked") public final Map.Entry next() { + V v; if ((v = nextVal) == null && (v = advance()) == null) throw new NoSuchElementException(); Object k = nextKey; nextVal = null; - return new MapEntry((K)k, (V)v, map); + return new MapEntry((K)k, v, map); } } /** * Exported Entry for iterators */ - static final class MapEntry implements Map.Entry { + static final class MapEntry implements Map.Entry { final K key; // non-null V val; // non-null - final ConcurrentHashMapV8 map; - MapEntry(K key, V val, ConcurrentHashMapV8 map) { + final ConcurrentHashMapV8 map; + MapEntry(K key, V val, ConcurrentHashMapV8 map) { this.key = key; this.val = val; this.map = map; @@ -2952,8 +3163,8 @@ public class ConcurrentHashMapV8 /** * Sets our entry's value and writes through to the map. The - * value to return is somewhat arbitrary here. Since a we do - * not necessarily track asynchronous changes, the most recent + * value to return is somewhat arbitrary here. Since we do not + * necessarily track asynchronous changes, the most recent * "previous" value could be different from what we return (or * could even have been removed in which case the put will * re-establish). We do not and cannot guarantee more. @@ -2967,27 +3178,1430 @@ public class ConcurrentHashMapV8 } } + /** + * Returns exportable snapshot entry for the given key and value + * when write-through can't or shouldn't be used. + */ + static AbstractMap.SimpleEntry entryFor(K k, V v) { + return new AbstractMap.SimpleEntry(k, v); + } + + /* ---------------- Serialization Support -------------- */ + + /** + * Stripped-down version of helper class used in previous version, + * declared for the sake of serialization compatibility + */ + static class Segment implements Serializable { + private static final long serialVersionUID = 2249069246763182397L; + final float loadFactor; + Segment(float lf) { this.loadFactor = lf; } + } + + /** + * Saves the state of the {@code ConcurrentHashMapV8} instance to a + * stream (i.e., serializes it). + * @param s the stream + * @serialData + * the key (Object) and value (Object) + * for each key-value mapping, followed by a null pair. + * The key-value mappings are emitted in no particular order. 
+ */ + @SuppressWarnings("unchecked") private void writeObject + (java.io.ObjectOutputStream s) + throws java.io.IOException { + if (segments == null) { // for serialization compatibility + segments = (Segment[]) + new Segment[DEFAULT_CONCURRENCY_LEVEL]; + for (int i = 0; i < segments.length; ++i) + segments[i] = new Segment(LOAD_FACTOR); + } + s.defaultWriteObject(); + Traverser it = new Traverser(this); + V v; + while ((v = it.advance()) != null) { + s.writeObject(it.nextKey); + s.writeObject(v); + } + s.writeObject(null); + s.writeObject(null); + segments = null; // throw away + } + + /** + * Reconstitutes the instance from a stream (that is, deserializes it). + * @param s the stream + */ + @SuppressWarnings("unchecked") private void readObject + (java.io.ObjectInputStream s) + throws java.io.IOException, ClassNotFoundException { + s.defaultReadObject(); + this.segments = null; // unneeded + + // Create all nodes, then place in table once size is known + long size = 0L; + Node p = null; + for (;;) { + K k = (K) s.readObject(); + V v = (V) s.readObject(); + if (k != null && v != null) { + int h = spread(k.hashCode()); + p = new Node(h, k, v, p); + ++size; + } + else + break; + } + if (p != null) { + boolean init = false; + int n; + if (size >= (long)(MAXIMUM_CAPACITY >>> 1)) + n = MAXIMUM_CAPACITY; + else { + int sz = (int)size; + n = tableSizeFor(sz + (sz >>> 1) + 1); + } + int sc = sizeCtl; + boolean collide = false; + if (n > sc && + U.compareAndSwapInt(this, SIZECTL, sc, -1)) { + try { + if (table == null) { + init = true; + @SuppressWarnings("rawtypes") Node[] rt = new Node[n]; + Node[] tab = (Node[])rt; + int mask = n - 1; + while (p != null) { + int j = p.hash & mask; + Node next = p.next; + Node q = p.next = tabAt(tab, j); + setTabAt(tab, j, p); + if (!collide && q != null && q.hash == p.hash) + collide = true; + p = next; + } + table = tab; + addCount(size, -1); + sc = n - (n >>> 2); + } + } finally { + sizeCtl = sc; + } + if (collide) { // rescan and convert to TreeBins + Node[] tab = table; + for (int i = 0; i < tab.length; ++i) { + int c = 0; + for (Node e = tabAt(tab, i); e != null; e = e.next) { + if (++c > TREE_THRESHOLD && + (e.key instanceof Comparable)) { + replaceWithTreeBin(tab, i, e.key); + break; + } + } + } + } + } + if (!init) { // Can only happen if unsafely published. 
+ while (p != null) { + internalPut((K)p.key, p.val, false); + p = p.next; + } + } + } + } + + // ------------------------------------------------------- + + // Sams + /** Interface describing a void action of one argument */ + public interface Action { void apply(A a); } + /** Interface describing a void action of two arguments */ + public interface BiAction { void apply(A a, B b); } + /** Interface describing a function of one argument */ + public interface Fun { T apply(A a); } + /** Interface describing a function of two arguments */ + public interface BiFun { T apply(A a, B b); } + /** Interface describing a function of no arguments */ + public interface Generator { T apply(); } + /** Interface describing a function mapping its argument to a double */ + public interface ObjectToDouble { double apply(A a); } + /** Interface describing a function mapping its argument to a long */ + public interface ObjectToLong { long apply(A a); } + /** Interface describing a function mapping its argument to an int */ + public interface ObjectToInt {int apply(A a); } + /** Interface describing a function mapping two arguments to a double */ + public interface ObjectByObjectToDouble { double apply(A a, B b); } + /** Interface describing a function mapping two arguments to a long */ + public interface ObjectByObjectToLong { long apply(A a, B b); } + /** Interface describing a function mapping two arguments to an int */ + public interface ObjectByObjectToInt {int apply(A a, B b); } + /** Interface describing a function mapping a double to a double */ + public interface DoubleToDouble { double apply(double a); } + /** Interface describing a function mapping a long to a long */ + public interface LongToLong { long apply(long a); } + /** Interface describing a function mapping an int to an int */ + public interface IntToInt { int apply(int a); } + /** Interface describing a function mapping two doubles to a double */ + public interface DoubleByDoubleToDouble { double apply(double a, double b); } + /** Interface describing a function mapping two longs to a long */ + public interface LongByLongToLong { long apply(long a, long b); } + /** Interface describing a function mapping two ints to an int */ + public interface IntByIntToInt { int apply(int a, int b); } + + + // ------------------------------------------------------- + + // Sequential bulk operations + + /** + * Performs the given action for each (key, value). + * + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachSequentially + (BiAction action) { + if (action == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; + while ((v = it.advance()) != null) + action.apply((K)it.nextKey, v); + } + + /** + * Performs the given action for each non-null transformation + * of each (key, value). + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachSequentially + (BiFun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = transformer.apply((K)it.nextKey, v)) != null) + action.apply(u); + } + } + + /** + * Returns a non-null result from applying the given search + * function on each (key, value), or null if none. 
+ * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each (key, value), or null if none + */ + @SuppressWarnings("unchecked") public U searchSequentially + (BiFun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = searchFunction.apply((K)it.nextKey, v)) != null) + return u; + } + return null; + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, or null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + @SuppressWarnings("unchecked") public U reduceSequentially + (BiFun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + U r = null, u; V v; + while ((v = it.advance()) != null) { + if ((u = transformer.apply((K)it.nextKey, v)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + @SuppressWarnings("unchecked") public double reduceToDoubleSequentially + (ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + double r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey, v)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + @SuppressWarnings("unchecked") public long reduceToLongSequentially + (ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + long r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey, v)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. 
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + @SuppressWarnings("unchecked") public int reduceToIntSequentially + (ObjectByObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + int r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey, v)); + return r; + } + + /** + * Performs the given action for each key. + * + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachKeySequentially + (Action action) { + if (action == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + while (it.advance() != null) + action.apply((K)it.nextKey); + } + + /** + * Performs the given action for each non-null transformation + * of each key. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachKeySequentially + (Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + U u; + while (it.advance() != null) { + if ((u = transformer.apply((K)it.nextKey)) != null) + action.apply(u); + } + ForkJoinTasks.forEachKey + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each key, or null if none. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each key, or null if none + */ + @SuppressWarnings("unchecked") public U searchKeysSequentially + (Fun searchFunction) { + Traverser it = new Traverser(this); + U u; + while (it.advance() != null) { + if ((u = searchFunction.apply((K)it.nextKey)) != null) + return u; + } + return null; + } + + /** + * Returns the result of accumulating all keys using the given + * reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all keys using the given + * reducer to combine values, or null if none + */ + @SuppressWarnings("unchecked") public K reduceKeysSequentially + (BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + K r = null; + while (it.advance() != null) { + K u = (K)it.nextKey; + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, or + * null if none. 
+ * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + @SuppressWarnings("unchecked") public U reduceKeysSequentially + (Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + U r = null, u; + while (it.advance() != null) { + if ((u = transformer.apply((K)it.nextKey)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + @SuppressWarnings("unchecked") public double reduceKeysToDoubleSequentially + (ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + double r = basis; + while (it.advance() != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + @SuppressWarnings("unchecked") public long reduceKeysToLongSequentially + (ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + long r = basis; + while (it.advance() != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + @SuppressWarnings("unchecked") public int reduceKeysToIntSequentially + (ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + int r = basis; + while (it.advance() != null) + r = reducer.apply(r, transformer.apply((K)it.nextKey)); + return r; + } + + /** + * Performs the given action for each value. 
+ * + * @param action the action + */ + public void forEachValueSequentially(Action action) { + if (action == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; + while ((v = it.advance()) != null) + action.apply(v); + } + + /** + * Performs the given action for each non-null transformation + * of each value. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + */ + public void forEachValueSequentially + (Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = transformer.apply(v)) != null) + action.apply(u); + } + } + + /** + * Returns a non-null result from applying the given search + * function on each value, or null if none. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each value, or null if none + */ + public U searchValuesSequentially + (Fun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = searchFunction.apply(v)) != null) + return u; + } + return null; + } + + /** + * Returns the result of accumulating all values using the + * given reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all values + */ + public V reduceValuesSequentially + (BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V r = null; V v; + while ((v = it.advance()) != null) + r = (r == null) ? v : reducer.apply(r, v); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, or + * null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public U reduceValuesSequentially + (Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + U r = null, u; V v; + while ((v = it.advance()) != null) { + if ((u = transformer.apply(v)) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. 
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public double reduceValuesToDoubleSequentially + (ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + double r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(v)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public long reduceValuesToLongSequentially + (ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + long r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(v)); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public int reduceValuesToIntSequentially + (ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + int r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(v)); + return r; + } + + /** + * Performs the given action for each entry. + * + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachEntrySequentially + (Action> action) { + if (action == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; + while ((v = it.advance()) != null) + action.apply(entryFor((K)it.nextKey, v)); + } + + /** + * Performs the given action for each non-null transformation + * of each entry. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + @SuppressWarnings("unchecked") public void forEachEntrySequentially + (Fun, ? extends U> transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null) + action.apply(u); + } + } + + /** + * Returns a non-null result from applying the given search + * function on each entry, or null if none. 
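A tiny sketch of the entry-wise sequential traversal just documented (the method name and entry types are illustrative; java.util.Map is already imported at the top of this file):

    // Print each mapping as "key=value" in traversal order, without
    // locking or copying the table. The action only reads the entries.
    static void dumpEntries(ConcurrentHashMapV8<String, Integer> map) {
        map.forEachEntrySequentially(
            new ConcurrentHashMapV8.Action<Map.Entry<String, Integer>>() {
                public void apply(Map.Entry<String, Integer> e) {
                    System.out.println(e.getKey() + "=" + e.getValue());
                }
            });
    }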
+ * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each entry, or null if none + */ + @SuppressWarnings("unchecked") public U searchEntriesSequentially + (Fun, ? extends U> searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + V v; U u; + while ((v = it.advance()) != null) { + if ((u = searchFunction.apply(entryFor((K)it.nextKey, v))) != null) + return u; + } + return null; + } + + /** + * Returns the result of accumulating all entries using the + * given reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all entries + */ + @SuppressWarnings("unchecked") public Map.Entry reduceEntriesSequentially + (BiFun, Map.Entry, ? extends Map.Entry> reducer) { + if (reducer == null) throw new NullPointerException(); + Traverser it = new Traverser(this); + Map.Entry r = null; V v; + while ((v = it.advance()) != null) { + Map.Entry u = entryFor((K)it.nextKey, v); + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * or null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + @SuppressWarnings("unchecked") public U reduceEntriesSequentially + (Fun, ? extends U> transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + U r = null, u; V v; + while ((v = it.advance()) != null) { + if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null) + r = (r == null) ? u : reducer.apply(r, u); + } + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + @SuppressWarnings("unchecked") public double reduceEntriesToDoubleSequentially + (ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + double r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v))); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. 
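The plain entry reduction above needs only a reducer, and picking an extremum is a natural fit because min/max are commutative and associative. A hypothetical sketch:

    // Entry with the largest value, or null for an empty map.
    static Map.Entry<String, Integer> maxByValue(
            ConcurrentHashMapV8<String, Integer> map) {
        return map.reduceEntriesSequentially(
            new ConcurrentHashMapV8.BiFun<Map.Entry<String, Integer>,
                                          Map.Entry<String, Integer>,
                                          Map.Entry<String, Integer>>() {
                public Map.Entry<String, Integer> apply(
                        Map.Entry<String, Integer> a,
                        Map.Entry<String, Integer> b) {
                    return a.getValue() >= b.getValue() ? a : b;
                }
            });
    }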
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + @SuppressWarnings("unchecked") public long reduceEntriesToLongSequentially + (ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + long r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v))); + return r; + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + @SuppressWarnings("unchecked") public int reduceEntriesToIntSequentially + (ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + Traverser it = new Traverser(this); + int r = basis; V v; + while ((v = it.advance()) != null) + r = reducer.apply(r, transformer.apply(entryFor((K)it.nextKey, v))); + return r; + } + + // Parallel bulk operations + + /** + * Performs the given action for each (key, value). + * + * @param action the action + */ + public void forEachInParallel(BiAction action) { + ForkJoinTasks.forEach + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each (key, value). + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public void forEachInParallel + (BiFun transformer, + Action action) { + ForkJoinTasks.forEach + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each (key, value), or null if none. Upon + * success, further element processing is suppressed and the + * results of any other parallel invocations of the search + * function are ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each (key, value), or null if none + */ + public U searchInParallel + (BiFun searchFunction) { + return ForkJoinTasks.search + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, or null if none. 
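The parallel search just described is short-circuiting: the first non-null result wins and the results of losing invocations are discarded. A hypothetical reverse lookup using it (the method and parameter names are assumptions for illustration):

    // First key found whose value equals target, or null. The search
    // function may run on many fork/join workers concurrently, so it
    // must be side-effect free.
    static String keyForValue(ConcurrentHashMapV8<String, Integer> map,
                              final int target) {
        return map.searchInParallel(
            new ConcurrentHashMapV8.BiFun<String, Integer, String>() {
                public String apply(String k, Integer v) {
                    return v.intValue() == target ? k : null;
                }
            });
    }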
+ * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public U reduceInParallel + (BiFun transformer, + BiFun reducer) { + return ForkJoinTasks.reduce + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public double reduceToDoubleInParallel + (ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public long reduceToLongInParallel + (ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all (key, value) pairs using the given reducer to + * combine values, and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all (key, value) pairs + */ + public int reduceToIntInParallel + (ObjectByObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceToInt + (this, transformer, basis, reducer).invoke(); + } + + /** + * Performs the given action for each key. + * + * @param action the action + */ + public void forEachKeyInParallel(Action action) { + ForkJoinTasks.forEachKey + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each key. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public void forEachKeyInParallel + (Fun transformer, + Action action) { + ForkJoinTasks.forEachKey + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each key, or null if none. Upon success, + * further element processing is suppressed and the results of + * any other parallel invocations of the search function are + * ignored. 
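A sketch of the primitive-basis (key, value) reduction above, which avoids boxing the per-element results; names are assumed for illustration:

    // Parallel weighted sum: key length times value.
    static long weightedSum(ConcurrentHashMapV8<String, Integer> map) {
        return map.reduceToLongInParallel(
            new ConcurrentHashMapV8.ObjectByObjectToLong<String, Integer>() {
                public long apply(String k, Integer v) {
                    return (long) k.length() * v.intValue();
                }
            },
            0L,   // identity for addition
            new ConcurrentHashMapV8.LongByLongToLong() {
                public long apply(long a, long b) { return a + b; }
            });
    }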
+ * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each key, or null if none + */ + public U searchKeysInParallel + (Fun searchFunction) { + return ForkJoinTasks.searchKeys + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating all keys using the given + * reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all keys using the given + * reducer to combine values, or null if none + */ + public K reduceKeysInParallel + (BiFun reducer) { + return ForkJoinTasks.reduceKeys + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, or + * null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + public U reduceKeysInParallel + (Fun transformer, + BiFun reducer) { + return ForkJoinTasks.reduceKeys + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + public double reduceKeysToDoubleInParallel + (ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceKeysToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + public long reduceKeysToLongInParallel + (ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceKeysToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all keys using the given reducer to combine values, and + * the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all keys + */ + public int reduceKeysToIntInParallel + (ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceKeysToInt + (this, transformer, basis, reducer).invoke(); + } + + /** + * Performs the given action for each value. 
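Because the parallel key reduction above may combine partial results in any grouping, the reducer must be order-insensitive; taking a minimum qualifies. A hypothetical sketch:

    // Lexicographically smallest key, or null for an empty map.
    static String minKey(ConcurrentHashMapV8<String, ?> map) {
        return map.reduceKeysInParallel(
            new ConcurrentHashMapV8.BiFun<String, String, String>() {
                public String apply(String a, String b) {
                    return a.compareTo(b) <= 0 ? a : b;
                }
            });
    }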
+ * + * @param action the action + */ + public void forEachValueInParallel(Action action) { + ForkJoinTasks.forEachValue + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each value. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + */ + public void forEachValueInParallel + (Fun transformer, + Action action) { + ForkJoinTasks.forEachValue + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each value, or null if none. Upon success, + * further element processing is suppressed and the results of + * any other parallel invocations of the search function are + * ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each value, or null if none + */ + public U searchValuesInParallel + (Fun searchFunction) { + return ForkJoinTasks.searchValues + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating all values using the + * given reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all values + */ + public V reduceValuesInParallel + (BiFun reducer) { + return ForkJoinTasks.reduceValues + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, or + * null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public U reduceValuesInParallel + (Fun transformer, + BiFun reducer) { + return ForkJoinTasks.reduceValues + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public double reduceValuesToDoubleInParallel + (ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceValuesToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. 
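The double-basis value reduction just above, sketched the same way (illustrative names; the reducer again must be commutative and associative, and note that floating-point addition is only approximately associative, which these operations tolerate for estimation-style uses):

    // Parallel sum of all values as a double.
    static double valueSum(ConcurrentHashMapV8<?, Integer> map) {
        return map.reduceValuesToDoubleInParallel(
            new ConcurrentHashMapV8.ObjectToDouble<Integer>() {
                public double apply(Integer v) { return v.doubleValue(); }
            },
            0.0,
            new ConcurrentHashMapV8.DoubleByDoubleToDouble() {
                public double apply(double a, double b) { return a + b; }
            });
    }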
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public long reduceValuesToLongInParallel + (ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceValuesToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all values using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all values + */ + public int reduceValuesToIntInParallel + (ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceValuesToInt + (this, transformer, basis, reducer).invoke(); + } + + /** + * Performs the given action for each entry. + * + * @param action the action + */ + public void forEachEntryInParallel(Action> action) { + ForkJoinTasks.forEachEntry + (this, action).invoke(); + } + + /** + * Performs the given action for each non-null transformation + * of each entry. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public void forEachEntryInParallel + (Fun, ? extends U> transformer, + Action action) { + ForkJoinTasks.forEachEntry + (this, transformer, action).invoke(); + } + + /** + * Returns a non-null result from applying the given search + * function on each entry, or null if none. Upon success, + * further element processing is suppressed and the results of + * any other parallel invocations of the search function are + * ignored. + * + * @param searchFunction a function returning a non-null + * result on success, else null + * @return a non-null result from applying the given search + * function on each entry, or null if none + */ + public U searchEntriesInParallel + (Fun, ? extends U> searchFunction) { + return ForkJoinTasks.searchEntries + (this, searchFunction).invoke(); + } + + /** + * Returns the result of accumulating all entries using the + * given reducer to combine values, or null if none. + * + * @param reducer a commutative associative combining function + * @return the result of accumulating all entries + */ + public Map.Entry reduceEntriesInParallel + (BiFun, Map.Entry, ? extends Map.Entry> reducer) { + return ForkJoinTasks.reduceEntries + (this, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * or null if none. + * + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public U reduceEntriesInParallel + (Fun, ? 
extends U> transformer, + BiFun reducer) { + return ForkJoinTasks.reduceEntries + (this, transformer, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public double reduceEntriesToDoubleInParallel + (ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + return ForkJoinTasks.reduceEntriesToDouble + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public long reduceEntriesToLongInParallel + (ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + return ForkJoinTasks.reduceEntriesToLong + (this, transformer, basis, reducer).invoke(); + } + + /** + * Returns the result of accumulating the given transformation + * of all entries using the given reducer to combine values, + * and the given basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the result of accumulating the given transformation + * of all entries + */ + public int reduceEntriesToIntInParallel + (ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + return ForkJoinTasks.reduceEntriesToInt + (this, transformer, basis, reducer).invoke(); + } + + /* ----------------Views -------------- */ /** * Base class for views. */ - static abstract class MapView { - final ConcurrentHashMapV8 map; - MapView(ConcurrentHashMapV8 map) { this.map = map; } + abstract static class CHMView { + final ConcurrentHashMapV8 map; + CHMView(ConcurrentHashMapV8 map) { this.map = map; } + + /** + * Returns the map backing this view. + * + * @return the map backing this view + */ + public ConcurrentHashMapV8 getMap() { return map; } + public final int size() { return map.size(); } public final boolean isEmpty() { return map.isEmpty(); } public final void clear() { map.clear(); } // implementations below rely on concrete classes supplying these - abstract public Iterator iterator(); - abstract public boolean contains(Object o); - abstract public boolean remove(Object o); + public abstract Iterator iterator(); + public abstract boolean contains(Object o); + public abstract boolean remove(Object o); private static final String oomeMsg = "Required array size too large"; public final Object[] toArray() { - long sz = map.longSize(); + long sz = map.mappingCount(); if (sz > (long)(MAX_ARRAY_SIZE)) throw new OutOfMemoryError(oomeMsg); int n = (int)sz; @@ -3009,9 +4623,8 @@ public class ConcurrentHashMapV8 return (i == n) ? 
r : Arrays.copyOf(r, i); } - @SuppressWarnings("unchecked") - public final T[] toArray(T[] a) { - long sz = map.longSize(); + @SuppressWarnings("unchecked") public final T[] toArray(T[] a) { + long sz = map.mappingCount(); if (sz > (long)(MAX_ARRAY_SIZE)) throw new OutOfMemoryError(oomeMsg); int m = (int)sz; @@ -3098,18 +4711,63 @@ public class ConcurrentHashMapV8 } - static final class KeySet extends MapView implements Set { - KeySet(ConcurrentHashMapV8 map) { super(map); } - public final boolean contains(Object o) { return map.containsKey(o); } - public final boolean remove(Object o) { return map.remove(o) != null; } - public final Iterator iterator() { - return new KeyIterator(map); - } - public final boolean add(K e) { - throw new UnsupportedOperationException(); + /** + * A view of a ConcurrentHashMapV8 as a {@link Set} of keys, in + * which additions may optionally be enabled by mapping to a + * common value. This class cannot be directly instantiated. See + * {@link #keySet()}, {@link #keySet(Object)}, {@link #newKeySet()}, + * {@link #newKeySet(int)}. + */ + public static class KeySetView extends CHMView + implements Set, java.io.Serializable { + private static final long serialVersionUID = 7249069246763182397L; + private final V value; + KeySetView(ConcurrentHashMapV8 map, V value) { // non-public + super(map); + this.value = value; } - public final boolean addAll(Collection c) { - throw new UnsupportedOperationException(); + + /** + * Returns the default mapped value for additions, + * or {@code null} if additions are not supported. + * + * @return the default mapped value for additions, or {@code null} + * if not supported + */ + public V getMappedValue() { return value; } + + // implement Set API + + public boolean contains(Object o) { return map.containsKey(o); } + public boolean remove(Object o) { return map.remove(o) != null; } + + /** + * Returns a "weakly consistent" iterator that will never + * throw {@link ConcurrentModificationException}, and + * guarantees to traverse elements as they existed upon + * construction of the iterator, and may (but is not + * guaranteed to) reflect any modifications subsequent to + * construction. + * + * @return an iterator over the keys of this map + */ + public Iterator iterator() { return new KeyIterator(map); } + public boolean add(K e) { + V v; + if ((v = value) == null) + throw new UnsupportedOperationException(); + return map.internalPut(e, v, true) == null; + } + public boolean addAll(Collection c) { + boolean added = false; + V v; + if ((v = value) == null) + throw new UnsupportedOperationException(); + for (K e : c) { + if (map.internalPut(e, v, true) == null) + added = true; + } + return added; } public boolean equals(Object o) { Set c; @@ -3119,9 +4777,20 @@ public class ConcurrentHashMapV8 } } - static final class Values extends MapView + /** + * A view of a ConcurrentHashMapV8 as a {@link Collection} of + * values, in which additions are disabled. This class cannot be + * directly instantiated. See {@link #values()}. + * + *
The view's {@code iterator} is a "weakly consistent" iterator + * that will never throw {@link ConcurrentModificationException}, + * and guarantees to traverse elements as they existed upon + * construction of the iterator, and may (but is not guaranteed to) + * reflect any modifications subsequent to construction. + */ + public static final class ValuesView extends CHMView implements Collection { - Values(ConcurrentHashMapV8 map) { super(map); } + ValuesView(ConcurrentHashMapV8 map) { super(map); } public final boolean contains(Object o) { return map.containsValue(o); } public final boolean remove(Object o) { if (o != null) { @@ -3135,6 +4804,17 @@ public class ConcurrentHashMapV8 } return false; } + + /** + * Returns a "weakly consistent" iterator that will never + * throw {@link ConcurrentModificationException}, and + * guarantees to traverse elements as they existed upon + * construction of the iterator, and may (but is not + * guaranteed to) reflect any modifications subsequent to + * construction. + * + * @return an iterator over the values of this map + */ public final Iterator iterator() { return new ValueIterator(map); } @@ -3144,11 +4824,17 @@ public class ConcurrentHashMapV8 public final boolean addAll(Collection c) { throw new UnsupportedOperationException(); } + } - static final class EntrySet extends MapView + /** + * A view of a ConcurrentHashMapV8 as a {@link Set} of (key, value) + * entries. This class cannot be directly instantiated. See + * {@link #entrySet()}. + */ + public static final class EntrySetView extends CHMView implements Set> { - EntrySet(ConcurrentHashMapV8 map) { super(map); } + EntrySetView(ConcurrentHashMapV8 map) { super(map); } public final boolean contains(Object o) { Object k, v, r; Map.Entry e; return ((o instanceof Map.Entry) && @@ -3164,14 +4850,31 @@ public class ConcurrentHashMapV8 (v = e.getValue()) != null && map.remove(k, v)); } + + /** + * Returns a "weakly consistent" iterator that will never + * throw {@link ConcurrentModificationException}, and + * guarantees to traverse elements as they existed upon + * construction of the iterator, and may (but is not + * guaranteed to) reflect any modifications subsequent to + * construction. + * + * @return an iterator over the entries of this map + */ public final Iterator> iterator() { return new EntryIterator(map); } + public final boolean add(Entry e) { - throw new UnsupportedOperationException(); + return map.internalPut(e.getKey(), e.getValue(), false) == null; } public final boolean addAll(Collection> c) { - throw new UnsupportedOperationException(); + boolean added = false; + for (Entry e : c) { + if (add(e)) + added = true; + } + return added; } public boolean equals(Object o) { Set c; @@ -3181,156 +4884,1944 @@ public class ConcurrentHashMapV8 } } - /* ---------------- Serialization Support -------------- */ + // --------------------------------------------------------------------- /** - * Stripped-down version of helper class used in previous version, - * declared for the sake of serialization compatibility + * Predefined tasks for performing bulk parallel operations on + * ConcurrentHashMapV8s. These tasks follow the forms and rules used + * for bulk operations. Each method has the same name, but returns + * a task rather than invoking it. These methods may be useful in + * custom applications such as submitting a task without waiting + * for completion, using a custom pool, or combining with other + * tasks. 
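A short sketch of the set views described above: keySet(V) yields a KeySetView whose add operations write the given default value into the backing map. The variable names and map contents are illustrative only:

    static void keySetViewDemo() {
        ConcurrentHashMapV8<String, Boolean> map =
            new ConcurrentHashMapV8<String, Boolean>();
        ConcurrentHashMapV8.KeySetView<String, Boolean> set =
            map.keySet(Boolean.TRUE);            // additions map to TRUE
        set.add("alpha");                        // backing map gains "alpha" -> TRUE
        boolean present = map.containsKey("alpha");  // true: same underlying table
        set.remove("alpha");                     // removes the mapping as well
    }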
*/ - static class Segment implements Serializable { - private static final long serialVersionUID = 2249069246763182397L; - final float loadFactor; - Segment(float lf) { this.loadFactor = lf; } - } + public static class ForkJoinTasks { + private ForkJoinTasks() {} - /** - * Saves the state of the {@code ConcurrentHashMapV8} instance to a - * stream (i.e., serializes it). - * @param s the stream - * @serialData - * the key (Object) and value (Object) - * for each key-value mapping, followed by a null pair. - * The key-value mappings are emitted in no particular order. - */ - @SuppressWarnings("unchecked") - private void writeObject(java.io.ObjectOutputStream s) - throws java.io.IOException { - if (segments == null) { // for serialization compatibility - segments = (Segment[]) - new Segment[DEFAULT_CONCURRENCY_LEVEL]; - for (int i = 0; i < segments.length; ++i) - segments[i] = new Segment(LOAD_FACTOR); + /** + * Returns a task that when invoked, performs the given + * action for each (key, value) + * + * @param map the map + * @param action the action + * @return the task + */ + public static ForkJoinTask forEach + (ConcurrentHashMapV8 map, + BiAction action) { + if (action == null) throw new NullPointerException(); + return new ForEachMappingTask(map, null, -1, action); } - s.defaultWriteObject(); - InternalIterator it = new InternalIterator(this); - Object v; - while ((v = it.advance()) != null) { - s.writeObject(it.nextKey); - s.writeObject(v); + + /** + * Returns a task that when invoked, performs the given + * action for each non-null transformation of each (key, value) + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + * @return the task + */ + public static ForkJoinTask forEach + (ConcurrentHashMapV8 map, + BiFun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedMappingTask + (map, null, -1, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each (key, + * value), or null if none. Upon success, further element + * processing is suppressed and the results of any other + * parallel invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask search + (ConcurrentHashMapV8 map, + BiFun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchMappingsTask + (map, null, -1, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, or null if none. 
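As the class comment above notes, these factories let a bulk operation run in a custom pool or proceed asynchronously instead of blocking the caller. A hypothetical sketch, assuming the jsr166e ForkJoinPool and ForkJoinTask companions to this class are available in the same package:

    // Assumes: import jsr166e.ForkJoinPool; import jsr166e.ForkJoinTask;
    static <V> String longestKeyAsync(ConcurrentHashMapV8<String, V> map,
                                      ForkJoinPool pool) {
        ForkJoinTask<String> task =
            ConcurrentHashMapV8.ForkJoinTasks.reduceKeys(
                map,
                new ConcurrentHashMapV8.BiFun<String, String, String>() {
                    public String apply(String a, String b) {
                        return a.length() >= b.length() ? a : b;
                    }
                });
        pool.submit(task);      // start it without blocking the caller
        // ... other work can proceed here ...
        return task.join();     // then wait for and fetch the result
    }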
+ * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduce + (ConcurrentHashMapV8 map, + BiFun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToDouble + (ConcurrentHashMapV8 map, + ObjectByObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToLong + (ConcurrentHashMapV8 map, + ObjectByObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all (key, value) pairs + * using the given reducer to combine values, and the given + * basis as an identity value. + * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceToInt + (ConcurrentHashMapV8 map, + ObjectByObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceMappingsToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, performs the given action + * for each key. + * + * @param map the map + * @param action the action + * @return the task + */ + public static ForkJoinTask forEachKey + (ConcurrentHashMapV8 map, + Action action) { + if (action == null) throw new NullPointerException(); + return new ForEachKeyTask(map, null, -1, action); + } + + /** + * Returns a task that when invoked, performs the given action + * for each non-null transformation of each key. 
+ * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + * @return the task + */ + public static ForkJoinTask forEachKey + (ConcurrentHashMapV8 map, + Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedKeyTask + (map, null, -1, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each key, or + * null if none. Upon success, further element processing is + * suppressed and the results of any other parallel + * invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask searchKeys + (ConcurrentHashMapV8 map, + Fun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchKeysTask + (map, null, -1, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating all keys using the given reducer to combine + * values, or null if none. + * + * @param map the map + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeys + (ConcurrentHashMapV8 map, + BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + return new ReduceKeysTask + (map, null, -1, null, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeys + (ConcurrentHashMapV8 map, + Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. 
+ * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToLong + (ConcurrentHashMapV8 map, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all keys using the given + * reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceKeysToInt + (ConcurrentHashMapV8 map, + ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceKeysToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, performs the given action + * for each value. + * + * @param map the map + * @param action the action + */ + public static ForkJoinTask forEachValue + (ConcurrentHashMapV8 map, + Action action) { + if (action == null) throw new NullPointerException(); + return new ForEachValueTask(map, null, -1, action); + } + + /** + * Returns a task that when invoked, performs the given action + * for each non-null transformation of each value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public static ForkJoinTask forEachValue + (ConcurrentHashMapV8 map, + Fun transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedValueTask + (map, null, -1, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each value, or + * null if none. Upon success, further element processing is + * suppressed and the results of any other parallel + * invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask searchValues + (ConcurrentHashMapV8 map, + Fun searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchValuesTask + (map, null, -1, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating all values using the given reducer to combine + * values, or null if none. 
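Since each factory returns a plain ForkJoinTask, the task's Future face applies as well; for instance, a bounded wait on the value-search task above. This is a hypothetical sketch: the names are assumptions, cancellation is best-effort, and it presumes the standard Future timeout semantics of ForkJoinTask.get:

    // Assumes: import java.util.concurrent.TimeUnit;
    //          import java.util.concurrent.TimeoutException;
    //          import jsr166e.ForkJoinPool; import jsr166e.ForkJoinTask;
    static <K> Integer firstNegative(ConcurrentHashMapV8<K, Integer> map,
                                     ForkJoinPool pool) throws Exception {
        ForkJoinTask<Integer> probe =
            ConcurrentHashMapV8.ForkJoinTasks.searchValues(
                map,
                new ConcurrentHashMapV8.Fun<Integer, Integer>() {
                    public Integer apply(Integer v) {
                        return v.intValue() < 0 ? v : null;
                    }
                });
        pool.submit(probe);
        try {
            return probe.get(1, TimeUnit.SECONDS);  // wait at most one second
        } catch (TimeoutException e) {
            probe.cancel(true);   // best-effort: give up on the search
            return null;
        }
    }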
+ * + * @param map the map + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValues + (ConcurrentHashMapV8 map, + BiFun reducer) { + if (reducer == null) throw new NullPointerException(); + return new ReduceValuesTask + (map, null, -1, null, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValues + (ConcurrentHashMapV8 map, + Fun transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValuesToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValuesToLong + (ConcurrentHashMapV8 map, + ObjectToLong transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all values using the + * given reducer to combine values, and the given basis as an + * identity value. 
+ * + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceValuesToInt + (ConcurrentHashMapV8 map, + ObjectToInt transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceValuesToIntTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, performs the given action + * for each entry. + * + * @param map the map + * @param action the action + */ + public static ForkJoinTask forEachEntry + (ConcurrentHashMapV8 map, + Action> action) { + if (action == null) throw new NullPointerException(); + return new ForEachEntryTask(map, null, -1, action); + } + + /** + * Returns a task that when invoked, performs the given action + * for each non-null transformation of each entry. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case the action is not applied) + * @param action the action + */ + public static ForkJoinTask forEachEntry + (ConcurrentHashMapV8 map, + Fun, ? extends U> transformer, + Action action) { + if (transformer == null || action == null) + throw new NullPointerException(); + return new ForEachTransformedEntryTask + (map, null, -1, transformer, action); + } + + /** + * Returns a task that when invoked, returns a non-null result + * from applying the given search function on each entry, or + * null if none. Upon success, further element processing is + * suppressed and the results of any other parallel + * invocations of the search function are ignored. + * + * @param map the map + * @param searchFunction a function returning a non-null + * result on success, else null + * @return the task + */ + public static ForkJoinTask searchEntries + (ConcurrentHashMapV8 map, + Fun, ? extends U> searchFunction) { + if (searchFunction == null) throw new NullPointerException(); + return new SearchEntriesTask + (map, null, -1, searchFunction, + new AtomicReference()); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating all entries using the given reducer to combine + * values, or null if none. + * + * @param map the map + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask> reduceEntries + (ConcurrentHashMapV8 map, + BiFun, Map.Entry, ? extends Map.Entry> reducer) { + if (reducer == null) throw new NullPointerException(); + return new ReduceEntriesTask + (map, null, -1, null, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, or null if none. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element, or null if there is no transformation (in + * which case it is not combined) + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntries + (ConcurrentHashMapV8 map, + Fun, ? 
extends U> transformer, + BiFun reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesTask + (map, null, -1, null, transformer, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToDouble + (ConcurrentHashMapV8 map, + ObjectToDouble> transformer, + double basis, + DoubleByDoubleToDouble reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToDoubleTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToLong + (ConcurrentHashMapV8 map, + ObjectToLong> transformer, + long basis, + LongByLongToLong reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToLongTask + (map, null, -1, null, transformer, basis, reducer); + } + + /** + * Returns a task that when invoked, returns the result of + * accumulating the given transformation of all entries using the + * given reducer to combine values, and the given basis as an + * identity value. + * + * @param map the map + * @param transformer a function returning the transformation + * for an element + * @param basis the identity (initial default value) for the reduction + * @param reducer a commutative associative combining function + * @return the task + */ + public static ForkJoinTask reduceEntriesToInt + (ConcurrentHashMapV8 map, + ObjectToInt> transformer, + int basis, + IntByIntToInt reducer) { + if (transformer == null || reducer == null) + throw new NullPointerException(); + return new MapReduceEntriesToIntTask + (map, null, -1, null, transformer, basis, reducer); } - s.writeObject(null); - s.writeObject(null); - segments = null; // throw away } - /** - * Reconstitutes the instance from a stream (that is, deserializes it). - * @param s the stream - */ - @SuppressWarnings("unchecked") - private void readObject(java.io.ObjectInputStream s) - throws java.io.IOException, ClassNotFoundException { - s.defaultReadObject(); - this.segments = null; // unneeded - // initialize transient final field - UNSAFE.putObjectVolatile(this, counterOffset, new LongAdder()); + // ------------------------------------------------------- - // Create all nodes, then place in table once size is known - long size = 0L; - Node p = null; - for (;;) { - K k = (K) s.readObject(); - V v = (V) s.readObject(); - if (k != null && v != null) { - int h = spread(k.hashCode()); - p = new Node(h, k, v, p); - ++size; + /* + * Task classes. 
Coded in a regular but ugly format/style to + * simplify checks that each variant differs in the right way from + * others. The null screenings exist because compilers cannot tell + * that we've already null-checked task arguments, so we force + * simplest hoisted bypass to help avoid convoluted traps. + */ + + @SuppressWarnings("serial") static final class ForEachKeyTask + extends Traverser { + final Action action; + ForEachKeyTask + (ConcurrentHashMapV8 m, Traverser p, int b, + Action action) { + super(m, p, b); + this.action = action; + } + @SuppressWarnings("unchecked") public final void compute() { + final Action action; + if ((action = this.action) != null) { + for (int b; (b = preSplit()) > 0;) + new ForEachKeyTask(map, this, b, action).fork(); + while (advance() != null) + action.apply((K)nextKey); + propagateCompletion(); + } + } + } + + @SuppressWarnings("serial") static final class ForEachValueTask + extends Traverser { + final Action action; + ForEachValueTask + (ConcurrentHashMapV8 m, Traverser p, int b, + Action action) { + super(m, p, b); + this.action = action; + } + @SuppressWarnings("unchecked") public final void compute() { + final Action action; + if ((action = this.action) != null) { + for (int b; (b = preSplit()) > 0;) + new ForEachValueTask(map, this, b, action).fork(); + V v; + while ((v = advance()) != null) + action.apply(v); + propagateCompletion(); + } + } + } + + @SuppressWarnings("serial") static final class ForEachEntryTask + extends Traverser { + final Action> action; + ForEachEntryTask + (ConcurrentHashMapV8 m, Traverser p, int b, + Action> action) { + super(m, p, b); + this.action = action; + } + @SuppressWarnings("unchecked") public final void compute() { + final Action> action; + if ((action = this.action) != null) { + for (int b; (b = preSplit()) > 0;) + new ForEachEntryTask(map, this, b, action).fork(); + V v; + while ((v = advance()) != null) + action.apply(entryFor((K)nextKey, v)); + propagateCompletion(); + } + } + } + + @SuppressWarnings("serial") static final class ForEachMappingTask + extends Traverser { + final BiAction action; + ForEachMappingTask + (ConcurrentHashMapV8 m, Traverser p, int b, + BiAction action) { + super(m, p, b); + this.action = action; + } + @SuppressWarnings("unchecked") public final void compute() { + final BiAction action; + if ((action = this.action) != null) { + for (int b; (b = preSplit()) > 0;) + new ForEachMappingTask(map, this, b, action).fork(); + V v; + while ((v = advance()) != null) + action.apply((K)nextKey, v); + propagateCompletion(); + } + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedKeyTask + extends Traverser { + final Fun transformer; + final Action action; + ForEachTransformedKeyTask + (ConcurrentHashMapV8 m, Traverser p, int b, + Fun transformer, Action action) { + super(m, p, b); + this.transformer = transformer; this.action = action; + } + @SuppressWarnings("unchecked") public final void compute() { + final Fun transformer; + final Action action; + if ((transformer = this.transformer) != null && + (action = this.action) != null) { + for (int b; (b = preSplit()) > 0;) + new ForEachTransformedKeyTask + (map, this, b, transformer, action).fork(); + U u; + while (advance() != null) { + if ((u = transformer.apply((K)nextKey)) != null) + action.apply(u); + } + propagateCompletion(); + } + } + } + + @SuppressWarnings("serial") static final class ForEachTransformedValueTask + extends Traverser { + final Fun transformer; + final Action action; + ForEachTransformedValueTask + 
+    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
+        extends Traverser<K,V,Void> {
+        final Fun<? super V, ? extends U> transformer;
+        final Action<U> action;
+        ForEachTransformedValueTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             Fun<? super V, ? extends U> transformer, Action<U> action) {
+            super(m, p, b);
+            this.transformer = transformer; this.action = action;
+        }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<? super V, ? extends U> transformer;
+            final Action<U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    new ForEachTransformedValueTask<K,V,U>
+                        (map, this, b, transformer, action).fork();
+                V v; U u;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply(v)) != null)
+                        action.apply(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
+        extends Traverser<K,V,Void> {
+        final Fun<Map.Entry<K,V>, ? extends U> transformer;
+        final Action<U> action;
+        ForEachTransformedEntryTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             Fun<Map.Entry<K,V>, ? extends U> transformer, Action<U> action) {
+            super(m, p, b);
+            this.transformer = transformer; this.action = action;
+        }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<Map.Entry<K,V>, ? extends U> transformer;
+            final Action<U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    new ForEachTransformedEntryTask<K,V,U>
+                        (map, this, b, transformer, action).fork();
+                V v; U u;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply(entryFor((K)nextKey,
+                                                        v))) != null)
+                        action.apply(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
+        extends Traverser<K,V,Void> {
+        final BiFun<? super K, ? super V, ? extends U> transformer;
+        final Action<U> action;
+        ForEachTransformedMappingTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             BiFun<? super K, ? super V, ? extends U> transformer,
+             Action<U> action) {
+            super(m, p, b);
+            this.transformer = transformer; this.action = action;
+        }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<? super K, ? super V, ? extends U> transformer;
+            final Action<U> action;
+            if ((transformer = this.transformer) != null &&
+                (action = this.action) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    new ForEachTransformedMappingTask<K,V,U>
+                        (map, this, b, transformer, action).fork();
+                V v; U u;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply((K)nextKey, v)) != null)
+                        action.apply(u);
+                }
+                propagateCompletion();
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<? super K, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchKeysTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             Fun<? super K, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(m, p, b);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<? super K, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int b;;) {
+                    if (result.get() != null)
+                        return;
+                    if ((b = preSplit()) <= 0)
+                        break;
+                    new SearchKeysTask<K,V,U>
+                        (map, this, b, searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    U u;
+                    if (advance() == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply((K)nextKey)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
+                }
             }
-            else
-                break;
         }
-        if (p != null) {
-            boolean init = false;
-            int n;
-            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
-                n = MAXIMUM_CAPACITY;
-            else {
-                int sz = (int)size;
-                n = tableSizeFor(sz + (sz >>> 1) + 1);
+    }
+
+    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<? super V, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchValuesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             Fun<? super V, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(m, p, b);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<? super V, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int b;;) {
+                    if (result.get() != null)
+                        return;
+                    if ((b = preSplit()) <= 0)
+                        break;
+                    new SearchValuesTask<K,V,U>
+                        (map, this, b, searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    V v; U u;
+                    if ((v = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply(v)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
+                }
             }
-            int sc = sizeCtl;
-            boolean collide = false;
-            if (n > sc &&
-                UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
-                try {
-                    if (table == null) {
-                        init = true;
-                        Node[] tab = new Node[n];
-                        int mask = n - 1;
-                        while (p != null) {
-                            int j = p.hash & mask;
-                            Node next = p.next;
-                            Node q = p.next = tabAt(tab, j);
-                            setTabAt(tab, j, p);
-                            if (!collide && q != null && q.hash == p.hash)
-                                collide = true;
-                            p = next;
-                        }
-                        table = tab;
-                        counter.add(size);
-                        sc = n - (n >>> 2);
+        }
+    }
+
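+    // The search variants above terminate early: every subtask polls the
+    // shared AtomicReference before splitting and before each element, so
+    // once any thread publishes a non-null hit via compareAndSet, the
+    // remaining tasks stop and quietlyCompleteRoot() finishes the whole
+    // tree without waiting for unprocessed splits. Caller sketch (an
+    // assumption: the searchValues factory declared earlier in this class
+    // follows the same shape as the entry factories above):
+    //
+    //   ForkJoinTask<String> t = ForkJoinTasks.searchValues
+    //       (m, new Fun<String,String>() {
+    //           public String apply(String v) {
+    //               return v.startsWith("x") ? v : null; // null = keep looking
+    //           }
+    //       });
+    //   String firstMatch = t.invoke(); // null if nothing matched
+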
+    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<Map.Entry<K,V>, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchEntriesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             Fun<Map.Entry<K,V>, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(m, p, b);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<Map.Entry<K,V>, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int b;;) {
+                    if (result.get() != null)
+                        return;
+                    if ((b = preSplit()) <= 0)
+                        break;
+                    new SearchEntriesTask<K,V,U>
+                        (map, this, b, searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    V v; U u;
+                    if ((v = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply(entryFor((K)nextKey,
+                                                           v))) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        return;
+                    }
                 }
-            } finally {
-                sizeCtl = sc;
             }
-            if (collide) { // rescan and convert to TreeBins
-                Node[] tab = table;
-                for (int i = 0; i < tab.length; ++i) {
-                    int c = 0;
-                    for (Node e = tabAt(tab, i); e != null; e = e.next) {
-                        if (++c > TREE_THRESHOLD &&
-                            (e.key instanceof Comparable)) {
-                            replaceWithTreeBin(tab, i, e.key);
-                            break;
-                        }
-                    }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final BiFun<? super K, ? super V, ? extends U> searchFunction;
+        final AtomicReference<U> result;
+        SearchMappingsTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             BiFun<? super K, ? super V, ? extends U> searchFunction,
+             AtomicReference<U> result) {
+            super(m, p, b);
+            this.searchFunction = searchFunction; this.result = result;
+        }
+        public final U getRawResult() { return result.get(); }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<? super K, ? super V, ? extends U> searchFunction;
+            final AtomicReference<U> result;
+            if ((searchFunction = this.searchFunction) != null &&
+                (result = this.result) != null) {
+                for (int b;;) {
+                    if (result.get() != null)
+                        return;
+                    if ((b = preSplit()) <= 0)
+                        break;
+                    new SearchMappingsTask<K,V,U>
+                        (map, this, b, searchFunction, result).fork();
+                }
+                while (result.get() == null) {
+                    V v; U u;
+                    if ((v = advance()) == null) {
+                        propagateCompletion();
+                        break;
+                    }
+                    if ((u = searchFunction.apply((K)nextKey, v)) != null) {
+                        if (result.compareAndSet(null, u))
+                            quietlyCompleteRoot();
+                        break;
+                    }
                 }
             }
         }
-        if (!init) { // Can only happen if unsafely published.
-            while (p != null) {
-                internalPut(p.key, p.val);
-                p = p.next;
+    }
+
+    @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
+        extends Traverser<K,V,K> {
+        final BiFun<? super K, ? super K, ? extends K> reducer;
+        K result;
+        ReduceKeysTask<K,V> rights, nextRight;
+        ReduceKeysTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             ReduceKeysTask<K,V> nextRight,
+             BiFun<? super K, ? super K, ? extends K> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final K getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<? super K, ? super K, ? extends K> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new ReduceKeysTask<K,V>
+                     (map, this, b, rights, reducer)).fork();
+                K r = null;
+                while (advance() != null) {
+                    K u = (K)nextKey;
+                    r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceKeysTask<K,V>
+                        t = (ReduceKeysTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        K tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
+        extends Traverser<K,V,V> {
+        final BiFun<? super V, ? super V, ? extends V> reducer;
+        V result;
+        ReduceValuesTask<K,V> rights, nextRight;
+        ReduceValuesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             ReduceValuesTask<K,V> nextRight,
+             BiFun<? super V, ? super V, ? extends V> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final V getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<? super V, ? super V, ? extends V> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new ReduceValuesTask<K,V>
+                     (map, this, b, rights, reducer)).fork();
+                V r = null;
+                V v;
+                while ((v = advance()) != null) {
+                    V u = v;
+                    r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceValuesTask<K,V>
+                        t = (ReduceValuesTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        V tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
+        extends Traverser<K,V,Map.Entry<K,V>> {
+        final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
+        Map.Entry<K,V> result;
+        ReduceEntriesTask<K,V> rights, nextRight;
+        ReduceEntriesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             ReduceEntriesTask<K,V> nextRight,
+             BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.reducer = reducer;
+        }
+        public final Map.Entry<K,V> getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
+            if ((reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new ReduceEntriesTask<K,V>
+                     (map, this, b, rights, reducer)).fork();
+                Map.Entry<K,V> r = null;
+                V v;
+                while ((v = advance()) != null) {
+                    Map.Entry<K,V> u = entryFor((K)nextKey, v);
+                    r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    ReduceEntriesTask<K,V>
+                        t = (ReduceEntriesTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        Map.Entry<K,V> tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
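+    // How the Reduce* tasks combine results (explanatory note, not part
+    // of the patch): each task strings the subtasks it forked onto its
+    // `rights` list. As tasks complete, firstComplete()/nextComplete()
+    // walk completions upward, and each completed task folds the results
+    // of its finished right-hand siblings into its own `result` with the
+    // user's reducer, so the root ends up holding the reduction of the
+    // whole traversal. Null sub-results are skipped rather than handed to
+    // the reducer, which is why reducing an empty map yields null.
+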
+    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<? super K, ? extends U> transformer;
+        final BiFun<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceKeysTask<K,V,U> rights, nextRight;
+        MapReduceKeysTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceKeysTask<K,V,U> nextRight,
+             Fun<? super K, ? extends U> transformer,
+             BiFun<? super U, ? super U, ? extends U> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<? super K, ? extends U> transformer;
+            final BiFun<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceKeysTask<K,V,U>
+                     (map, this, b, rights, transformer, reducer)).fork();
+                U r = null, u;
+                while (advance() != null) {
+                    if ((u = transformer.apply((K)nextKey)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysTask<K,V,U>
+                        t = (MapReduceKeysTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<? super V, ? extends U> transformer;
+        final BiFun<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceValuesTask<K,V,U> rights, nextRight;
+        MapReduceValuesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceValuesTask<K,V,U> nextRight,
+             Fun<? super V, ? extends U> transformer,
+             BiFun<? super U, ? super U, ? extends U> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<? super V, ? extends U> transformer;
+            final BiFun<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceValuesTask<K,V,U>
+                     (map, this, b, rights, transformer, reducer)).fork();
+                U r = null, u;
+                V v;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply(v)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesTask<K,V,U>
+                        t = (MapReduceValuesTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final Fun<Map.Entry<K,V>, ? extends U> transformer;
+        final BiFun<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceEntriesTask<K,V,U> rights, nextRight;
+        MapReduceEntriesTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceEntriesTask<K,V,U> nextRight,
+             Fun<Map.Entry<K,V>, ? extends U> transformer,
+             BiFun<? super U, ? super U, ? extends U> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final Fun<Map.Entry<K,V>, ? extends U> transformer;
+            final BiFun<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceEntriesTask<K,V,U>
+                     (map, this, b, rights, transformer, reducer)).fork();
+                U r = null, u;
+                V v;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply(entryFor((K)nextKey,
+                                                        v))) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesTask<K,V,U>
+                        t = (MapReduceEntriesTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
+        extends Traverser<K,V,U> {
+        final BiFun<? super K, ? super V, ? extends U> transformer;
+        final BiFun<? super U, ? super U, ? extends U> reducer;
+        U result;
+        MapReduceMappingsTask<K,V,U> rights, nextRight;
+        MapReduceMappingsTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceMappingsTask<K,V,U> nextRight,
+             BiFun<? super K, ? super V, ? extends U> transformer,
+             BiFun<? super U, ? super U, ? extends U> reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.reducer = reducer;
+        }
+        public final U getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final BiFun<? super K, ? super V, ? extends U> transformer;
+            final BiFun<? super U, ? super U, ? extends U> reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceMappingsTask<K,V,U>
+                     (map, this, b, rights, transformer, reducer)).fork();
+                U r = null, u;
+                V v;
+                while ((v = advance()) != null) {
+                    if ((u = transformer.apply((K)nextKey, v)) != null)
+                        r = (r == null) ? u : reducer.apply(r, u);
+                }
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsTask<K,V,U>
+                        t = (MapReduceMappingsTask<K,V,U>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        U tr, sr;
+                        if ((sr = s.result) != null)
+                            t.result = (((tr = t.result) == null) ? sr :
+                                        reducer.apply(tr, sr));
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
+        extends Traverser<K,V,Double> {
+        final ObjectToDouble<? super K> transformer;
+        final DoubleByDoubleToDouble reducer;
+        final double basis;
+        double result;
+        MapReduceKeysToDoubleTask<K,V> rights, nextRight;
+        MapReduceKeysToDoubleTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceKeysToDoubleTask<K,V> nextRight,
+             ObjectToDouble<? super K> transformer,
+             double basis,
+             DoubleByDoubleToDouble reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToDouble<? super K> transformer;
+            final DoubleByDoubleToDouble reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceKeysToDoubleTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                while (advance() != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToDoubleTask<K,V>
+                        t = (MapReduceKeysToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
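+    // In the *ToDouble/*ToLong/*ToInt variants the caller-supplied
+    // `basis` plays the role that null plays above: every subtask starts
+    // its accumulator at the basis, so the basis must be an identity for
+    // the reducer (0 for +, 1 for *, Double.MAX_VALUE for min, ...).
+    // Otherwise it gets folded in once per subtask, and the outcome
+    // depends on how the traversal happened to split. For example,
+    // summing with basis 0.0 is safe; summing with basis 1.0 is not.
+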
+    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
+        extends Traverser<K,V,Double> {
+        final ObjectToDouble<? super V> transformer;
+        final DoubleByDoubleToDouble reducer;
+        final double basis;
+        double result;
+        MapReduceValuesToDoubleTask<K,V> rights, nextRight;
+        MapReduceValuesToDoubleTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceValuesToDoubleTask<K,V> nextRight,
+             ObjectToDouble<? super V> transformer,
+             double basis,
+             DoubleByDoubleToDouble reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToDouble<? super V> transformer;
+            final DoubleByDoubleToDouble reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceValuesToDoubleTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToDoubleTask<K,V>
+                        t = (MapReduceValuesToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
+        extends Traverser<K,V,Double> {
+        final ObjectToDouble<Map.Entry<K,V>> transformer;
+        final DoubleByDoubleToDouble reducer;
+        final double basis;
+        double result;
+        MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
+        MapReduceEntriesToDoubleTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceEntriesToDoubleTask<K,V> nextRight,
+             ObjectToDouble<Map.Entry<K,V>> transformer,
+             double basis,
+             DoubleByDoubleToDouble reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToDouble<Map.Entry<K,V>> transformer;
+            final DoubleByDoubleToDouble reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceEntriesToDoubleTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
+                                                                    v)));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToDoubleTask<K,V>
+                        t = (MapReduceEntriesToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
+        extends Traverser<K,V,Double> {
+        final ObjectByObjectToDouble<? super K, ? super V> transformer;
+        final DoubleByDoubleToDouble reducer;
+        final double basis;
+        double result;
+        MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
+        MapReduceMappingsToDoubleTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceMappingsToDoubleTask<K,V> nextRight,
+             ObjectByObjectToDouble<? super K, ? super V> transformer,
+             double basis,
+             DoubleByDoubleToDouble reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Double getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectByObjectToDouble<? super K, ? super V> transformer;
+            final DoubleByDoubleToDouble reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                double r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceMappingsToDoubleTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToDoubleTask<K,V>
+                        t = (MapReduceMappingsToDoubleTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
+        extends Traverser<K,V,Long> {
+        final ObjectToLong<? super K> transformer;
+        final LongByLongToLong reducer;
+        final long basis;
+        long result;
+        MapReduceKeysToLongTask<K,V> rights, nextRight;
+        MapReduceKeysToLongTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceKeysToLongTask<K,V> nextRight,
+             ObjectToLong<? super K> transformer,
+             long basis,
+             LongByLongToLong reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToLong<? super K> transformer;
+            final LongByLongToLong reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceKeysToLongTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                while (advance() != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToLongTask<K,V>
+                        t = (MapReduceKeysToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
+        extends Traverser<K,V,Long> {
+        final ObjectToLong<? super V> transformer;
+        final LongByLongToLong reducer;
+        final long basis;
+        long result;
+        MapReduceValuesToLongTask<K,V> rights, nextRight;
+        MapReduceValuesToLongTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceValuesToLongTask<K,V> nextRight,
+             ObjectToLong<? super V> transformer,
+             long basis,
+             LongByLongToLong reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToLong<? super V> transformer;
+            final LongByLongToLong reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceValuesToLongTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToLongTask<K,V>
+                        t = (MapReduceValuesToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
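+    // The Long variants differ from the Double ones only in scalar type
+    // and helper interfaces. A minimal transformer/reducer pair (sketch;
+    // assumes the ObjectToLong and LongByLongToLong interfaces declared
+    // in this class, and a reduceKeysToLong factory of the same shape as
+    // the entry factories above):
+    //
+    //   ObjectToLong<String> len = new ObjectToLong<String>() {
+    //       public long apply(String k) { return k.length(); }
+    //   };
+    //   LongByLongToLong max = new LongByLongToLong() {
+    //       public long apply(long x, long y) { return Math.max(x, y); }
+    //   };
+    //   // e.g. ForkJoinTasks.reduceKeysToLong(m, len, Long.MIN_VALUE, max)
+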
+    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
+        extends Traverser<K,V,Long> {
+        final ObjectToLong<Map.Entry<K,V>> transformer;
+        final LongByLongToLong reducer;
+        final long basis;
+        long result;
+        MapReduceEntriesToLongTask<K,V> rights, nextRight;
+        MapReduceEntriesToLongTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceEntriesToLongTask<K,V> nextRight,
+             ObjectToLong<Map.Entry<K,V>> transformer,
+             long basis,
+             LongByLongToLong reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToLong<Map.Entry<K,V>> transformer;
+            final LongByLongToLong reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceEntriesToLongTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
+                                                                    v)));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToLongTask<K,V>
+                        t = (MapReduceEntriesToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
+        extends Traverser<K,V,Long> {
+        final ObjectByObjectToLong<? super K, ? super V> transformer;
+        final LongByLongToLong reducer;
+        final long basis;
+        long result;
+        MapReduceMappingsToLongTask<K,V> rights, nextRight;
+        MapReduceMappingsToLongTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceMappingsToLongTask<K,V> nextRight,
+             ObjectByObjectToLong<? super K, ? super V> transformer,
+             long basis,
+             LongByLongToLong reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Long getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectByObjectToLong<? super K, ? super V> transformer;
+            final LongByLongToLong reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                long r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceMappingsToLongTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToLongTask<K,V>
+                        t = (MapReduceMappingsToLongTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
+        extends Traverser<K,V,Integer> {
+        final ObjectToInt<? super K> transformer;
+        final IntByIntToInt reducer;
+        final int basis;
+        int result;
+        MapReduceKeysToIntTask<K,V> rights, nextRight;
+        MapReduceKeysToIntTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceKeysToIntTask<K,V> nextRight,
+             ObjectToInt<? super K> transformer,
+             int basis,
+             IntByIntToInt reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToInt<? super K> transformer;
+            final IntByIntToInt reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceKeysToIntTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                while (advance() != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceKeysToIntTask<K,V>
+                        t = (MapReduceKeysToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
+        extends Traverser<K,V,Integer> {
+        final ObjectToInt<? super V> transformer;
+        final IntByIntToInt reducer;
+        final int basis;
+        int result;
+        MapReduceValuesToIntTask<K,V> rights, nextRight;
+        MapReduceValuesToIntTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceValuesToIntTask<K,V> nextRight,
+             ObjectToInt<? super V> transformer,
+             int basis,
+             IntByIntToInt reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToInt<? super V> transformer;
+            final IntByIntToInt reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceValuesToIntTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceValuesToIntTask<K,V>
+                        t = (MapReduceValuesToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
+        extends Traverser<K,V,Integer> {
+        final ObjectToInt<Map.Entry<K,V>> transformer;
+        final IntByIntToInt reducer;
+        final int basis;
+        int result;
+        MapReduceEntriesToIntTask<K,V> rights, nextRight;
+        MapReduceEntriesToIntTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceEntriesToIntTask<K,V> nextRight,
+             ObjectToInt<Map.Entry<K,V>> transformer,
+             int basis,
+             IntByIntToInt reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectToInt<Map.Entry<K,V>> transformer;
+            final IntByIntToInt reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceEntriesToIntTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply(entryFor((K)nextKey,
+                                                                    v)));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceEntriesToIntTask<K,V>
+                        t = (MapReduceEntriesToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
+        extends Traverser<K,V,Integer> {
+        final ObjectByObjectToInt<? super K, ? super V> transformer;
+        final IntByIntToInt reducer;
+        final int basis;
+        int result;
+        MapReduceMappingsToIntTask<K,V> rights, nextRight;
+        MapReduceMappingsToIntTask
+            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
+             MapReduceMappingsToIntTask<K,V> nextRight,
+             ObjectByObjectToInt<? super K, ? super V> transformer,
+             int basis,
+             IntByIntToInt reducer) {
+            super(m, p, b); this.nextRight = nextRight;
+            this.transformer = transformer;
+            this.basis = basis; this.reducer = reducer;
+        }
+        public final Integer getRawResult() { return result; }
+        @SuppressWarnings("unchecked") public final void compute() {
+            final ObjectByObjectToInt<? super K, ? super V> transformer;
+            final IntByIntToInt reducer;
+            if ((transformer = this.transformer) != null &&
+                (reducer = this.reducer) != null) {
+                int r = this.basis;
+                for (int b; (b = preSplit()) > 0;)
+                    (rights = new MapReduceMappingsToIntTask<K,V>
+                     (map, this, b, rights, transformer, r, reducer)).fork();
+                V v;
+                while ((v = advance()) != null)
+                    r = reducer.apply(r, transformer.apply((K)nextKey, v));
+                result = r;
+                CountedCompleter<?> c;
+                for (c = firstComplete(); c != null; c = c.nextComplete()) {
+                    MapReduceMappingsToIntTask<K,V>
+                        t = (MapReduceMappingsToIntTask<K,V>)c,
+                        s = t.rights;
+                    while (s != null) {
+                        t.result = reducer.apply(t.result, s.result);
+                        s = t.rights = s.nextRight;
+                    }
                 }
             }
         }
     }

     // Unsafe mechanics
-    private static final sun.misc.Unsafe UNSAFE;
-    private static final long counterOffset;
-    private static final long sizeCtlOffset;
+    private static final sun.misc.Unsafe U;
+    private static final long SIZECTL;
+    private static final long TRANSFERINDEX;
+    private static final long TRANSFERORIGIN;
+    private static final long BASECOUNT;
+    private static final long COUNTERBUSY;
+    private static final long CELLVALUE;
     private static final long ABASE;
     private static final int ASHIFT;

     static {
-        int ss;
         try {
-            UNSAFE = getUnsafe();
+            U = getUnsafe();
             Class<?> k = ConcurrentHashMapV8.class;
-            counterOffset = UNSAFE.objectFieldOffset
-                (k.getDeclaredField("counter"));
-            sizeCtlOffset = UNSAFE.objectFieldOffset
+            SIZECTL = U.objectFieldOffset
                 (k.getDeclaredField("sizeCtl"));
+            TRANSFERINDEX = U.objectFieldOffset
+                (k.getDeclaredField("transferIndex"));
+            TRANSFERORIGIN = U.objectFieldOffset
+                (k.getDeclaredField("transferOrigin"));
+            BASECOUNT = U.objectFieldOffset
+                (k.getDeclaredField("baseCount"));
+            COUNTERBUSY = U.objectFieldOffset
+                (k.getDeclaredField("counterBusy"));
+            Class<?> ck = CounterCell.class;
+            CELLVALUE = U.objectFieldOffset
+                (ck.getDeclaredField("value"));
             Class<?> sc = Node[].class;
-            ABASE = UNSAFE.arrayBaseOffset(sc);
-            ss = UNSAFE.arrayIndexScale(sc);
+            ABASE = U.arrayBaseOffset(sc);
+            int scale = U.arrayIndexScale(sc);
+            if ((scale & (scale - 1)) != 0)
+                throw new Error("data type scale not a power of two");
+            ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
         } catch (Exception e) {
             throw new Error(e);
         }
-        if ((ss & (ss-1)) != 0)
-            throw new Error("data type scale not a power of two");
-        ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
     }

     /**
@@ -3343,22 +6834,23 @@ public class ConcurrentHashMapV8
     private static sun.misc.Unsafe getUnsafe() {
         try {
             return sun.misc.Unsafe.getUnsafe();
-        } catch (SecurityException se) {
-            try {
-                return java.security.AccessController.doPrivileged
-                    (new java.security
-                     .PrivilegedExceptionAction<sun.misc.Unsafe>() {
-                        public sun.misc.Unsafe run() throws Exception {
-                            java.lang.reflect.Field f = sun.misc
-                                .Unsafe.class.getDeclaredField("theUnsafe");
-                            f.setAccessible(true);
-                            return (sun.misc.Unsafe) f.get(null);
-                        }});
-            } catch (java.security.PrivilegedActionException e) {
-                throw new RuntimeException("Could not initialize intrinsics",
-                                           e.getCause());
-            }
+        } catch (SecurityException tryReflectionInstead) {}
+        try {
+            return java.security.AccessController.doPrivileged
+                (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
+                    public sun.misc.Unsafe run() throws Exception {
+                        Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
+                        for (java.lang.reflect.Field f : k.getDeclaredFields()) {
+                            f.setAccessible(true);
+                            Object x = f.get(null);
+                            if (k.isInstance(x))
+                                return k.cast(x);
+                        }
+                        throw new NoSuchFieldError("the Unsafe");
+                    }});
+        } catch (java.security.PrivilegedActionException e) {
+            throw new RuntimeException("Could not initialize intrinsics",
+                                       e.getCause());
         }
     }
-    }
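+    // How the offsets above get used (explanatory sketch, not part of the
+    // patch): objectFieldOffset turns a field into an address delta for
+    // CAS, and ABASE/ASHIFT turn an array index into an element address,
+    // which works because arrayIndexScale(Node[].class) is a power of two.
+    // casSizeCtl here is a hypothetical helper named only for this sketch:
+    //
+    //   static boolean casSizeCtl(ConcurrentHashMapV8<?,?> m, int c, int v) {
+    //       return U.compareAndSwapInt(m, SIZECTL, c, v);   // field CAS
+    //   }
+    //   // element i of a Node[] lives at ABASE + ((long)i << ASHIFT):
+    //   static Node tabAt(Node[] tab, int i) {
+    //       return (Node)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
+    //   }
+    //
+    // The getDeclaredFields() scan in getUnsafe() is the fallback for
+    // environments where Unsafe.getUnsafe() is security-restricted and the
+    // singleton field is not necessarily named "theUnsafe".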