root/jsr166/jsr166/src/jsr166e/ConcurrentHashMapV8.java

Comparing jsr166/src/jsr166e/ConcurrentHashMapV8.java (file contents):
Revision 1.17 by jsr166, Sat Sep 10 00:53:14 2011 UTC vs.
Revision 1.81 by dl, Sat Dec 8 14:10:38 2012 UTC

# Line 5 | Line 5
5   */
6  
7   package jsr166e;
8 < import jsr166e.LongAdder;
8 >
9 > import java.util.Comparator;
10 > import java.util.Arrays;
11   import java.util.Map;
12   import java.util.Set;
13   import java.util.Collection;
# Line 19 | Line 21 | import java.util.Enumeration;
21   import java.util.ConcurrentModificationException;
22   import java.util.NoSuchElementException;
23   import java.util.concurrent.ConcurrentMap;
24 + import java.util.concurrent.ThreadLocalRandom;
25 + import java.util.concurrent.locks.LockSupport;
26 + import java.util.concurrent.locks.AbstractQueuedSynchronizer;
27 + import java.util.concurrent.atomic.AtomicReference;
28 +
29 + import java.io.Serializable;
30 +
31 + import java.util.Comparator;
32 + import java.util.Arrays;
33 + import java.util.Map;
34 + import java.util.Set;
35 + import java.util.Collection;
36 + import java.util.AbstractMap;
37 + import java.util.AbstractSet;
38 + import java.util.AbstractCollection;
39 + import java.util.Hashtable;
40 + import java.util.HashMap;
41 + import java.util.Iterator;
42 + import java.util.Enumeration;
43 + import java.util.ConcurrentModificationException;
44 + import java.util.NoSuchElementException;
45 + import java.util.concurrent.ConcurrentMap;
46 + import java.util.concurrent.ThreadLocalRandom;
47 + import java.util.concurrent.locks.LockSupport;
48 + import java.util.concurrent.locks.AbstractQueuedSynchronizer;
49 + import java.util.concurrent.atomic.AtomicReference;
50 +
51   import java.io.Serializable;
52  
53   /**
# Line 33 | Line 62 | import java.io.Serializable;
62   * interoperable with {@code Hashtable} in programs that rely on its
63   * thread safety but not on its synchronization details.
64   *
65 < * <p> Retrieval operations (including {@code get}) generally do not
65 > * <p>Retrieval operations (including {@code get}) generally do not
66   * block, so may overlap with update operations (including {@code put}
67   * and {@code remove}). Retrievals reflect the results of the most
68   * recently <em>completed</em> update operations holding upon their
69 < * onset.  For aggregate operations such as {@code putAll} and {@code
70 < * clear}, concurrent retrievals may reflect insertion or removal of
71 < * only some entries.  Similarly, Iterators and Enumerations return
72 < * elements reflecting the state of the hash table at some point at or
73 < * since the creation of the iterator/enumeration.  They do
74 < * <em>not</em> throw {@link ConcurrentModificationException}.
75 < * However, iterators are designed to be used by only one thread at a
76 < * time.  Bear in mind that the results of aggregate status methods
77 < * including {@code size}, {@code isEmpty}, and {@code containsValue}
78 < * are typically useful only when a map is not undergoing concurrent
79 < * updates in other threads.  Otherwise the results of these methods
80 < * reflect transient states that may be adequate for monitoring
81 < * or estimation purposes, but not for program control.
69 > * onset. (More formally, an update operation for a given key bears a
70 > * <em>happens-before</em> relation with any (non-null) retrieval for
71 > * that key reporting the updated value.)  For aggregate operations
72 > * such as {@code putAll} and {@code clear}, concurrent retrievals may
73 > * reflect insertion or removal of only some entries.  Similarly,
74 > * Iterators and Enumerations return elements reflecting the state of
75 > * the hash table at some point at or since the creation of the
76 > * iterator/enumeration.  They do <em>not</em> throw {@link
77 > * ConcurrentModificationException}.  However, iterators are designed
78 > * to be used by only one thread at a time.  Bear in mind that the
79 > * results of aggregate status methods including {@code size}, {@code
80 > * isEmpty}, and {@code containsValue} are typically useful only when
81 > * a map is not undergoing concurrent updates in other threads.
82 > * Otherwise the results of these methods reflect transient states
83 > * that may be adequate for monitoring or estimation purposes, but not
84 > * for program control.
85   *
86 < * <p> The table is dynamically expanded when there are too many
86 > * <p>The table is dynamically expanded when there are too many
87   * collisions (i.e., keys that have distinct hash codes but fall into
88   * the same slot modulo the table size), with the expected average
89 < * effect of maintaining roughly two bins per mapping. There may be
90 < * much variance around this average as mappings are added and
91 < * removed, but overall, this maintains a commonly accepted time/space
92 < * tradeoff for hash tables.  However, resizing this or any other kind
93 < * of hash table may be a relatively slow operation. When possible, it
94 < * is a good idea to provide a size estimate as an optional {@code
89 > * effect of maintaining roughly two bins per mapping (corresponding
90 > * to a 0.75 load factor threshold for resizing). There may be much
91 > * variance around this average as mappings are added and removed, but
92 > * overall, this maintains a commonly accepted time/space tradeoff for
93 > * hash tables.  However, resizing this or any other kind of hash
94 > * table may be a relatively slow operation. When possible, it is a
95 > * good idea to provide a size estimate as an optional {@code
96   * initialCapacity} constructor argument. An additional optional
97   * {@code loadFactor} constructor argument provides a further means of
98   * customizing initial table capacity by specifying the table density
# Line 68 | Line 101 | import java.io.Serializable;
101   * versions of this class, constructors may optionally specify an
102   * expected {@code concurrencyLevel} as an additional hint for
103   * internal sizing.  Note that using many keys with exactly the same
104 < * {@code hashCode{}} is a sure way to slow down performance of any
104 > * {@code hashCode()} is a sure way to slow down performance of any
105   * hash table.
106   *
107 + * <p>A {@link Set} projection of a ConcurrentHashMapV8 may be created
108 + * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
109 + * (using {@link #keySet(Object)}) when only keys are of interest, and the
110 + * mapped values are (perhaps transiently) not used or all take the
111 + * same mapping value.
112 + *
113 + * <p>A ConcurrentHashMapV8 can be used as a scalable frequency map (a
114 + * form of histogram or multiset) by using {@link LongAdder} values
115 + * and initializing via {@link #computeIfAbsent}. For example, to add
116 + * a count to a {@code ConcurrentHashMapV8<String,LongAdder> freqs}, you
117 + * can use {@code freqs.computeIfAbsent(k, k -> new
118 + * LongAdder()).increment();}
119 + *
120   * <p>This class and its views and iterators implement all of the
121   * <em>optional</em> methods of the {@link Map} and {@link Iterator}
122   * interfaces.
123   *
124 < * <p> Like {@link Hashtable} but unlike {@link HashMap}, this class
124 > * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
125   * does <em>not</em> allow {@code null} to be used as a key or value.
126   *
127 + * <p>ConcurrentHashMapV8s support parallel operations using the {@link
128 + * ForkJoinPool#commonPool}. (Tasks that may be used in other contexts
129 + * are available in class {@link ForkJoinTasks}). These operations are
130 + * designed to be safely, and often sensibly, applied even with maps
131 + * that are being concurrently updated by other threads; for example,
132 + * when computing a snapshot summary of the values in a shared
133 + * registry.  There are three kinds of operation, each with four
134 + * forms, accepting functions with Keys, Values, Entries, and (Key,
135 + * Value) arguments and/or return values. (The first three forms are
136 + * also available via the {@link #keySet()}, {@link #values()} and
137 + * {@link #entrySet()} views). Because the elements of a
138 + * ConcurrentHashMapV8 are not ordered in any particular way, and may be
139 + * processed in different orders in different parallel executions, the
140 + * correctness of supplied functions should not depend on any
141 + * ordering, or on any other objects or values that may transiently
142 + * change while computation is in progress; and except for forEach
143 + * actions, should ideally be side-effect-free.
144 + *
145 + * <ul>
146 + * <li> forEach: Perform a given action on each element.
147 + * A variant form applies a given transformation on each element
148 + * before performing the action.</li>
149 + *
150 + * <li> search: Return the first available non-null result of
151 + * applying a given function on each element; skipping further
152 + * search when a result is found.</li>
153 + *
154 + * <li> reduce: Accumulate each element.  The supplied reduction
155 + * function cannot rely on ordering (more formally, it should be
156 + * both associative and commutative).  There are five variants:
157 + *
158 + * <ul>
159 + *
160 + * <li> Plain reductions. (There is not a form of this method for
161 + * (key, value) function arguments since there is no corresponding
162 + * return type.)</li>
163 + *
164 + * <li> Mapped reductions that accumulate the results of a given
165 + * function applied to each element.</li>
166 + *
167 + * <li> Reductions to scalar doubles, longs, and ints, using a
168 + * given basis value.</li>
169 + *
170 + * </ul>
171 + * </li>
172 + * </ul>
173 + *
174 + * <p>The concurrency properties of bulk operations follow
175 + * from those of ConcurrentHashMapV8: Any non-null result returned
176 + * from {@code get(key)} and related access methods bears a
177 + * happens-before relation with the associated insertion or
178 + * update.  The result of any bulk operation reflects the
179 + * composition of these per-element relations (but is not
180 + * necessarily atomic with respect to the map as a whole unless it
181 + * is somehow known to be quiescent).  Conversely, because keys
182 + * and values in the map are never null, null serves as a reliable
183 + * atomic indicator of the current lack of any result.  To
184 + * maintain this property, null serves as an implicit basis for
185 + * all non-scalar reduction operations. For the double, long, and
186 + * int versions, the basis should be one that, when combined with
187 + * any other value, returns that other value (more formally, it
188 + * should be the identity element for the reduction). Most common
189 + * reductions have these properties; for example, computing a sum
190 + * with basis 0 or a minimum with basis MAX_VALUE.
191 + *
192 + * <p>Search and transformation functions provided as arguments
193 + * should similarly return null to indicate the lack of any result
194 + * (in which case it is not used). In the case of mapped
195 + * reductions, this also enables transformations to serve as
196 + * filters, returning null (or, in the case of primitive
197 + * specializations, the identity basis) if the element should not
198 + * be combined. You can create compound transformations and
199 + * filterings by composing them yourself under this "null means
200 + * there is nothing there now" rule before using them in search or
201 + * reduce operations.
202 + *
203 + * <p>Methods accepting and/or returning Entry arguments maintain
204 + * key-value associations. They may be useful for example when
205 + * finding the key for the greatest value. Note that "plain" Entry
206 + * arguments can be supplied using {@code new
207 + * AbstractMap.SimpleEntry(k,v)}.
208 + *
209 + * <p>Bulk operations may complete abruptly, throwing an
210 + * exception encountered in the application of a supplied
211 + * function. Bear in mind when handling such exceptions that other
212 + * concurrently executing functions could also have thrown
213 + * exceptions, or would have done so if the first exception had
214 + * not occurred.
215 + *
216 + * <p>Parallel speedups for bulk operations compared to sequential
217 + * processing are common but not guaranteed.  Operations involving
218 + * brief functions on small maps may execute more slowly than
219 + * sequential loops if the underlying work to parallelize the
220 + * computation is more expensive than the computation itself.
221 + * Similarly, parallelization may not lead to much actual parallelism
222 + * if all processors are busy performing unrelated tasks.
223 + *
224 + * <p>All arguments to all task methods must be non-null.
225 + *
226 + * <p><em>jsr166e note: During transition, this class
227 + * uses nested functional interfaces with different names but the
228 + * same forms as those expected for JDK8.</em>
229 + *
230   * <p>This class is a member of the
231   * <a href="{@docRoot}/../technotes/guides/collections/index.html">
232   * Java Collections Framework</a>.
233   *
85 * <p><em>jsr166e note: This class is a candidate replacement for
86 * java.util.concurrent.ConcurrentHashMap.<em>
87 *
234   * @since 1.5
235   * @author Doug Lea
236   * @param <K> the type of keys maintained by this map
237   * @param <V> the type of mapped values
238   */
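
To make the frequency-map idiom from the class javadoc concrete, here is a minimal sketch. It assumes the two-argument computeIfAbsent(key, mappingFunction) form referenced above and uses a JDK 8 lambda in place of the transitional nested functional interface mentioned in the jsr166e note; the wrapper class and its names are hypothetical, not part of this file.

    import jsr166e.ConcurrentHashMapV8;
    import jsr166e.LongAdder;

    public class FreqSketch {
        public static void main(String[] args) {
            ConcurrentHashMapV8<String, LongAdder> freqs =
                new ConcurrentHashMapV8<String, LongAdder>();
            for (String w : new String[] { "get", "put", "get" })
                freqs.computeIfAbsent(w, k -> new LongAdder()).increment();
            System.out.println(freqs.get("get").sum()); // prints 2
        }
    }
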
239   public class ConcurrentHashMapV8<K, V>
240 <        implements ConcurrentMap<K, V>, Serializable {
240 >    implements ConcurrentMap<K, V>, Serializable {
241      private static final long serialVersionUID = 7249069246763182397L;
242  
243      /**
244 <     * A function computing a mapping from the given key to a value,
245 <     * or {@code null} if there is no mapping. This is a place-holder
246 <     * for an upcoming JDK8 interface.
247 <     */
248 <    public static interface MappingFunction<K, V> {
249 <        /**
250 <         * Returns a value for the given key, or null if there is no
251 <         * mapping. If this function throws an (unchecked) exception,
252 <         * the exception is rethrown to its caller, and no mapping is
253 <         * recorded.  Because this function is invoked within
254 <         * atomicity control, the computation should be short and
255 <         * simple. The most common usage is to construct a new object
256 <         * serving as an initial mapped value.
244 >     * A partitionable iterator. A Spliterator can be traversed
245 >     * directly, but can also be partitioned (before traversal) by
246 >     * creating another Spliterator that covers a non-overlapping
247 >     * portion of the elements, and so may be amenable to parallel
248 >     * execution.
249 >     *
250 >     * <p>This interface exports a subset of expected JDK8
251 >     * functionality.
252 >     *
253 >     * <p>Sample usage: Here is one (of the several) ways to compute
254 >     * the sum of the values held in a map using the ForkJoin
255 >     * framework. As illustrated here, Spliterators are well suited to
256 >     * designs in which a task repeatedly splits off half its work
257 >     * into forked subtasks until small enough to process directly,
258 >     * and then joins these subtasks. Variants of this style can also
259 >     * be used in completion-based designs.
260 >     *
261 >     * <pre>
262 >     * {@code ConcurrentHashMapV8<String, Long> m = ...
263 >     * // split as if have 8 * parallelism, for load balance
264 >     * int n = m.size();
265 >     * int p = aForkJoinPool.getParallelism() * 8;
266 >     * int split = (n < p)? n : p;
267 >     * long sum = aForkJoinPool.invoke(new SumValues(m.valueSpliterator(), split, null));
268 >     * // ...
269 >     * static class SumValues extends RecursiveTask<Long> {
270 >     *   final Spliterator<Long> s;
271 >     *   final int split;             // split while > 1
272 >     *   final SumValues nextJoin;    // records forked subtasks to join
273 >     *   SumValues(Spliterator<Long> s, int split, SumValues nextJoin) {
274 >     *     this.s = s; this.split = split; this.nextJoin = nextJoin;
275 >     *   }
276 >     *   public Long compute() {
277 >     *     long sum = 0;
278 >     *     SumValues subtasks = null; // fork subtasks
279 >     *     for (int h = split >>> 1; h > 0; h >>>= 1)
280 >     *       (subtasks = new SumValues(s.split(), h, subtasks)).fork();
281 >     *     while (s.hasNext())        // directly process remaining elements
282 >     *       sum += s.next();
283 >     *     for (SumValues t = subtasks; t != null; t = t.nextJoin)
284 >     *       sum += t.join();         // collect subtask results
285 >     *     return sum;
286 >     *   }
287 >     * }
288 >     * }</pre>
289 >     */
290 >    public static interface Spliterator<T> extends Iterator<T> {
291 >        /**
292 >         * Returns a Spliterator covering approximately half of the
293 >         * elements, guaranteed not to overlap with those subsequently
294 >         * returned by this Spliterator.  After invoking this method,
295 >         * the current Spliterator will <em>not</em> produce any of
296 >         * the elements of the returned Spliterator, but the two
297 >         * Spliterators together will produce all of the elements that
298 >         * would have been produced by this Spliterator had this
299 >         * method not been called. The exact number of elements
300 >         * produced by the returned Spliterator is not guaranteed, and
301 >         * may be zero (i.e., with {@code hasNext()} reporting {@code
302 >         * false}) if this Spliterator cannot be further split.
303           *
304 <         * @param key the (non-null) key
305 <         * @return a value, or null if none
304 >         * @return a Spliterator covering approximately half of the
305 >         * elements
306 >         * @throws IllegalStateException if this Spliterator has
307 >         * already commenced traversing elements
308           */
309 <        V map(K key);
309 >        Spliterator<T> split();
310      }
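
The split()/hasNext()/next() contract can also be exercised without a ForkJoinPool. A sketch, assuming the valueSpliterator() accessor used by the SumValues sample above; split() is called before traversal starts, and the half it returns may legitimately cover zero elements:

    ConcurrentHashMapV8<String, Long> m = new ConcurrentHashMapV8<String, Long>();
    m.put("a", 1L); m.put("b", 2L);
    ConcurrentHashMapV8.Spliterator<Long> s = m.valueSpliterator();
    ConcurrentHashMapV8.Spliterator<Long> half = s.split(); // may be empty
    long sum = 0L;
    while (s.hasNext())    sum += s.next();
    while (half.hasNext()) sum += half.next();
    // sum == 3L regardless of how the elements were partitioned
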
311  
312 +
313      /*
314       * Overview:
315       *
316       * The primary design goal of this hash table is to maintain
317       * concurrent readability (typically method get(), but also
318       * iterators and related methods) while minimizing update
319 <     * contention.
319 >     * contention. Secondary goals are to keep space consumption about
320 >     * the same as or better than java.util.HashMap, and to support high
321 >     * initial insertion rates on an empty table by many threads.
322       *
323       * Each key-value mapping is held in a Node.  Because Node fields
324       * can contain special values, they are defined using plain Object
# Line 129 | Line 326 | public class ConcurrentHashMapV8<K, V>
326       * work off Object types. And similarly, so do the internal
327       * methods of auxiliary iterator and view classes.  All public
328       * generic typed methods relay in/out of these internal methods,
329 <     * supplying null-checks and casts as needed.
329 >     * supplying null-checks and casts as needed. This also allows
330 >     * many of the public methods to be factored into a smaller number
331 >     * of internal methods (although sadly not so for the five
332 >     * variants of put-related operations). The validation-based
333 >     * approach explained below leads to a lot of code sprawl because
334 >     * retry-control precludes factoring into smaller methods.
335       *
336       * The table is lazily initialized to a power-of-two size upon the
337 <     * first insertion.  Each bin in the table contains a list of
338 <     * Nodes (most often, zero or one Node).  Table accesses require
339 <     * volatile/atomic reads, writes, and CASes.  Because there is no
340 <     * other way to arrange this without adding further indirections,
341 <     * we use intrinsics (sun.misc.Unsafe) operations.  The lists of
342 <     * nodes within bins are always accurately traversable under
343 <     * volatile reads, so long as lookups check hash code and
344 <     * non-nullness of value before checking key equality. (All valid
345 <     * hash codes are nonnegative. Negative values are reserved for
346 <     * special forwarding nodes; see below.)
337 >     * first insertion.  Each bin in the table normally contains a
338 >     * list of Nodes (most often, the list has only zero or one Node).
339 >     * Table accesses require volatile/atomic reads, writes, and
340 >     * CASes.  Because there is no other way to arrange this without
341 >     * adding further indirections, we use intrinsics
342 >     * (sun.misc.Unsafe) operations.  The lists of nodes within bins
343 >     * are always accurately traversable under volatile reads, so long
344 >     * as lookups check hash code and non-nullness of value before
345 >     * checking key equality.
346 >     *
347 >     * We use the top two bits of Node hash fields for control
348 >     * purposes -- they are available anyway because of addressing
349 >     * constraints.  As explained further below, these top bits are
350 >     * used as follows:
351 >     *  00 - Normal
352 >     *  01 - Locked
353 >     *  11 - Locked and may have a thread waiting for lock
354 >     *  10 - Node is a forwarding node
355 >     *
356 >     * The lower 30 bits of each Node's hash field contain a
357 >     * transformation of the key's hash code, except for forwarding
358 >     * nodes, for which the lower bits are zero (and so always have
359 >     * hash field == MOVED).
360       *
361 <     * Insertion (via put or putIfAbsent) of the first node in an
361 >     * Insertion (via put or its variants) of the first node in an
362       * empty bin is performed by just CASing it to the bin.  This is
363 <     * on average by far the most common case for put operations.
364 <     * Other update operations (insert, delete, and replace) require
365 <     * locks.  We do not want to waste the space required to associate
366 <     * a distinct lock object with each bin, so instead use the first
367 <     * node of a bin list itself as a lock, using plain "synchronized"
368 <     * locks. These save space and we can live with block-structured
369 <     * lock/unlock operations. Using the first node of a list as a
370 <     * lock does not by itself suffice though: When a node is locked,
371 <     * any update must first validate that it is still the first node,
372 <     * and retry if not. Because new nodes are always appended to
373 <     * lists, once a node is first in a bin, it remains first until
374 <     * deleted or the bin becomes invalidated.  However, operations
375 <     * that only conditionally update can and sometimes do inspect
376 <     * nodes until the point of update. This is a converse of sorts to
377 <     * the lazy locking technique described by Herlihy & Shavit.
363 >     * by far the most common case for put operations under most
364 >     * key/hash distributions.  Other update operations (insert,
365 >     * delete, and replace) require locks.  We do not want to waste
366 >     * the space required to associate a distinct lock object with
367 >     * each bin, so instead use the first node of a bin list itself as
368 >     * a lock. Blocking support for these locks relies on the builtin
369 >     * "synchronized" monitors.  However, we also need a tryLock
370 >     * construction, so we overlay these by using bits of the Node
371 >     * hash field for lock control (see above), and so normally use
372 >     * builtin monitors only for blocking and signalling using
373 >     * wait/notifyAll constructions. See Node.tryAwaitLock.
374 >     *
375 >     * Using the first node of a list as a lock does not by itself
376 >     * suffice though: When a node is locked, any update must first
377 >     * validate that it is still the first node after locking it, and
378 >     * retry if not. Because new nodes are always appended to lists,
379 >     * once a node is first in a bin, it remains first until deleted
380 >     * or the bin becomes invalidated (upon resizing).  However,
381 >     * operations that only conditionally update may inspect nodes
382 >     * until the point of update. This is a converse of sorts to the
383 >     * lazy locking technique described by Herlihy & Shavit.
384       *
385 <     * The main disadvantage of this approach is that most update
385 >     * The main disadvantage of per-bin locks is that other update
386       * operations on other nodes in a bin list protected by the same
387       * lock can stall, for example when user equals() or mapping
388 <     * functions take a long time.  However, statistically, this is
389 <     * not a common enough problem to outweigh the time/space overhead
390 <     * of alternatives: Under random hash codes, the frequency of
170 <     * nodes in bins follows a Poisson distribution
388 >     * functions take a long time.  However, statistically, under
389 >     * random hash codes, this is not a common problem.  Ideally, the
390 >     * frequency of nodes in bins follows a Poisson distribution
391       * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
392       * parameter of about 0.5 on average, given the resizing threshold
393       * of 0.75, although with a large variance because of resizing
394       * granularity. Ignoring variance, the expected occurrences of
395       * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
396 <     * first few values are:
396 >     * first values are:
397       *
398 <     * 0:    0.607
399 <     * 1:    0.303
400 <     * 2:    0.076
401 <     * 3:    0.012
402 <     * more: 0.002
398 >     * 0:    0.60653066
399 >     * 1:    0.30326533
400 >     * 2:    0.07581633
401 >     * 3:    0.01263606
402 >     * 4:    0.00157952
403 >     * 5:    0.00015795
404 >     * 6:    0.00001316
405 >     * 7:    0.00000094
406 >     * 8:    0.00000006
407 >     * more: less than 1 in ten million
408       *
409       * Lock contention probability for two threads accessing distinct
410 <     * elements is roughly 1 / (8 * #elements).  Function "spread"
411 <     * performs hashCode randomization that improves the likelihood
412 <     * that these assumptions hold unless users define exactly the
413 <     * same value for too many hashCodes.
414 <     *
415 <     * The table is resized when occupancy exceeds a threshold.  Only
416 <     * a single thread performs the resize (using field "resizing", to
417 <     * arrange exclusion), but the table otherwise remains usable for
418 <     * reads and updates. Resizing proceeds by transferring bins, one
419 <     * by one, from the table to the next table.  Upon transfer, the
420 <     * old table bin contains only a special forwarding node (with
421 <     * negative hash field) that contains the next table as its
422 <     * key. On encountering a forwarding node, access and update
423 <     * operations restart, using the new table. To ensure concurrent
424 <     * readability of traversals, transfers must proceed from the last
425 <     * bin (table.length - 1) up towards the first.  Upon seeing a
426 <     * forwarding node, traversals (see class InternalIterator)
427 <     * arrange to move to the new table for the rest of the traversal
428 <     * without revisiting nodes.  This constrains bin transfers to a
429 <     * particular order, and so can block indefinitely waiting for the
430 <     * next lock, and other threads cannot help with the transfer.
431 <     * However, expected stalls are infrequent enough to not warrant
432 <     * the additional overhead of access and iteration schemes that
433 <     * could admit out-of-order or concurrent bin transfers.
434 <     *
435 <     * This traversal scheme also applies to partial traversals of
436 <     * ranges of bins (via an alternate InternalIterator constructor)
437 <     * to support partitioned aggregate operations (that are not
438 <     * otherwise implemented yet).  Also, read-only operations give up
439 <     * if ever forwarded to a null table, which provides support for
440 <     * shutdown-style clearing, which is also not currently
441 <     * implemented.
410 >     * elements is roughly 1 / (8 * #elements) under random hashes.
411 >     *
412 >     * Actual hash code distributions encountered in practice
413 >     * sometimes deviate significantly from uniform randomness.  This
414 >     * includes the case when N > (1<<30), so some keys MUST collide.
415 >     * Similarly for dumb or hostile usages in which multiple keys are
416 >     * designed to have identical hash codes. Also, although we guard
417 >     * against the worst effects of this (see method spread), sets of
418 >     * hashes may differ only in bits that do not impact their bin
419 >     * index for a given power-of-two mask.  So we use a secondary
420 >     * strategy that applies when the number of nodes in a bin exceeds
421 >     * a threshold, and at least one of the keys implements
422 >     * Comparable.  These TreeBins use a balanced tree to hold nodes
423 >     * (a specialized form of red-black trees), bounding search time
424 >     * to O(log N).  Each search step in a TreeBin is around twice as
425 >     * slow as in a regular list, but given that N cannot exceed
426 >     * (1<<64) (before running out of addresses) this bounds search
427 >     * steps, lock hold times, etc, to reasonable constants (roughly
428 >     * 100 nodes inspected per operation worst case) so long as keys
429 >     * are Comparable (which is very common -- String, Long, etc).
430 >     * TreeBin nodes (TreeNodes) also maintain the same "next"
431 >     * traversal pointers as regular nodes, so can be traversed in
432 >     * iterators in the same way.
433 >     *
434 >     * The table is resized when occupancy exceeds a percentage
435 >     * threshold (nominally, 0.75, but see below).  Only a single
436 >     * thread performs the resize (using field "sizeCtl", to arrange
437 >     * exclusion), but the table otherwise remains usable for reads
438 >     * and updates. Resizing proceeds by transferring bins, one by
439 >     * one, from the table to the next table.  Because we are using
440 >     * power-of-two expansion, the elements from each bin must either
441 >     * stay at same index, or move with a power of two offset. We
442 >     * eliminate unnecessary node creation by catching cases where old
443 >     * nodes can be reused because their next fields won't change.  On
444 >     * average, only about one-sixth of them need cloning when a table
445 >     * doubles. The nodes they replace will be garbage collectable as
446 >     * soon as they are no longer referenced by any reader thread that
447 >     * may be in the midst of concurrently traversing table.  Upon
448 >     * transfer, the old table bin contains only a special forwarding
449 >     * node (with hash field "MOVED") that contains the next table as
450 >     * its key. On encountering a forwarding node, access and update
451 >     * operations restart, using the new table.
452 >     *
453 >     * Each bin transfer requires its bin lock. However, unlike other
454 >     * cases, a transfer can skip a bin if it fails to acquire its
455 >     * lock, and revisit it later (unless it is a TreeBin). Method
456 >     * rebuild maintains a buffer of TRANSFER_BUFFER_SIZE bins that
457 >     * have been skipped because of failure to acquire a lock, and
458 >     * blocks only if none are available (i.e., only very rarely).
459 >     * The transfer operation must also ensure that all accessible
460 >     * bins in both the old and new table are usable by any traversal.
461 >     * When there are no lock acquisition failures, this is arranged
462 >     * simply by proceeding from the last bin (table.length - 1) up
463 >     * towards the first.  Upon seeing a forwarding node, traversals
464 >     * (see class Iter) arrange to move to the new table
465 >     * without revisiting nodes.  However, when any node is skipped
466 >     * during a transfer, all earlier table bins may have become
467 >     * visible, so are initialized with a reverse-forwarding node back
468 >     * to the old table until the new ones are established. (This
469 >     * sometimes requires transiently locking a forwarding node, which
470 >     * is possible under the above encoding.) These more expensive
471 >     * mechanics trigger only when necessary.
472 >     *
473 >     * The traversal scheme also applies to partial traversals of
474 >     * ranges of bins (via an alternate Traverser constructor)
475 >     * to support partitioned aggregate operations.  Also, read-only
476 >     * operations give up if ever forwarded to a null table, which
477 >     * provides support for shutdown-style clearing, which is also not
478 >     * currently implemented.
479       *
480       * Lazy table initialization minimizes footprint until first use,
481       * and also avoids resizings when the first operation is from a
482       * putAll, constructor with map argument, or deserialization.
483 <     * These cases attempt to override the targetCapacity used in
484 <     * growTable. These harmlessly fail to take effect in cases of
223 <     * races with other ongoing resizings. Uses of the threshold and
224 <     * targetCapacity during attempted initializations or resizings
225 <     * are racy but fall back on checks to preserve correctness.
483 >     * These cases attempt to override the initial capacity settings,
484 >     * but harmlessly fail to take effect in cases of races.
485       *
486       * The element count is maintained using a LongAdder, which avoids
487       * contention on updates but can encounter cache thrashing if read
488       * too frequently during concurrent access. To avoid reading so
489 <     * often, resizing is normally attempted only upon adding to a bin
490 <     * already holding two or more nodes. Under uniform hash
491 <     * distributions, the probability of this occurring at threshold
492 <     * is around 13%, meaning that only about 1 in 8 puts check
493 <     * threshold (and after resizing, many fewer do so). But this
494 <     * approximation has high variance for small table sizes, so we
495 <     * check on any collision for sizes <= 64.  Further, to increase
496 <     * the probability that a resize occurs soon enough, we offset the
497 <     * threshold (see THRESHOLD_OFFSET) by the expected number of puts
498 <     * between checks.
489 >     * often, resizing is attempted either when a bin lock is
490 >     * contended, or upon adding to a bin already holding two or more
491 >     * nodes (checked before adding in the xIfAbsent methods, after
492 >     * adding in others). Under uniform hash distributions, the
493 >     * probability of this occurring at threshold is around 13%,
494 >     * meaning that only about 1 in 8 puts check threshold (and after
495 >     * resizing, many fewer do so). But this approximation has high
496 >     * variance for small table sizes, so we check on any collision
497 >     * for sizes <= 64. The bulk putAll operation further reduces
498 >     * contention by only committing count updates upon these size
499 >     * checks.
500       *
501       * Maintaining API and serialization compatibility with previous
502       * versions of this class introduces several oddities. Mainly: We
503       * leave untouched but unused constructor arguments refering to
504 <     * concurrencyLevel. We also declare an unused "Segment" class
505 <     * that is instantiated in minimal form only when serializing.
504 >     * concurrencyLevel. We accept a loadFactor constructor argument,
505 >     * but apply it only to initial table capacity (which is the only
506 >     * time that we can guarantee to honor it.) We also declare an
507 >     * unused "Segment" class that is instantiated in minimal form
508 >     * only when serializing.
509       */
510  
511      /* ---------------- Constants -------------- */
# Line 250 | Line 513 | public class ConcurrentHashMapV8<K, V>
513      /**
514       * The largest possible table capacity.  This value must be
515       * exactly 1<<30 to stay within Java array allocation and indexing
516 <     * bounds for power of two table sizes.
516 >     * bounds for power of two table sizes, and is further required
517 >     * because the top two bits of 32bit hash fields are used for
518 >     * control purposes.
519       */
520      private static final int MAXIMUM_CAPACITY = 1 << 30;
521  
# Line 261 | Line 526 | public class ConcurrentHashMapV8<K, V>
526      private static final int DEFAULT_CAPACITY = 16;
527  
528      /**
529 <     * The load factor for this table. Overrides of this value in
530 <     * constructors affect only the initial table capacity.  The
266 <     * actual floating point value isn't normally used, because it is
267 <     * simpler to rely on the expression {@code n - (n >>> 2)} for the
268 <     * associated resizing threshold.
529 >     * The largest possible (non-power of two) array size.
530 >     * Needed by toArray and related methods.
531       */
532 <    private static final float LOAD_FACTOR = 0.75f;
532 >    static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
533  
534      /**
535 <     * The count value to offset thresholds to compensate for checking
536 <     * for the need to resize only when inserting into bins with two
275 <     * or more elements. See above for explanation.
535 >     * The default concurrency level for this table. Unused but
536 >     * defined for compatibility with previous versions of this class.
537       */
538 <    private static final int THRESHOLD_OFFSET = 8;
538 >    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
539  
540      /**
541 <     * The default concurrency level for this table. Unused except as
542 <     * a sizing hint, but defined for compatibility with previous
543 <     * versions of this class.
541 >     * The load factor for this table. Overrides of this value in
542 >     * constructors affect only the initial table capacity.  The
543 >     * actual floating point value isn't normally used -- it is
544 >     * simpler to use expressions such as {@code n - (n >>> 2)} for
545 >     * the associated resizing threshold.
546       */
547 <    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
285 <
286 <    /* ---------------- Nodes -------------- */
547 >    private static final float LOAD_FACTOR = 0.75f;
548  
549      /**
550 <     * Key-value entry. Note that this is never exported out as a
551 <     * user-visible Map.Entry. Nodes with a negative hash field are
552 <     * special, and do not contain user keys or values.  Otherwise,
292 <     * keys are never null, and null val fields indicate that a node
293 <     * is in the process of being deleted or created. For purposes of
294 <     * read-only, access, a key may be read before a val, but can only
295 <     * be used after checking val.  (For an update operation, when a
296 <     * lock is held on a node, order doesn't matter.)
550 >     * The buffer size for skipped bins during transfers. The
551 >     * value is arbitrary but should be large enough to avoid
552 >     * most locking stalls during resizes.
553       */
554 <    static final class Node {
299 <        final int hash;
300 <        final Object key;
301 <        volatile Object val;
302 <        volatile Node next;
303 <
304 <        Node(int hash, Object key, Object val, Node next) {
305 <            this.hash = hash;
306 <            this.key = key;
307 <            this.val = val;
308 <            this.next = next;
309 <        }
310 <    }
554 >    private static final int TRANSFER_BUFFER_SIZE = 32;
555  
556      /**
557 <     * Sign bit of node hash value indicating to use table in node.key.
557 >     * The bin count threshold for using a tree rather than list for a
558 >     * bin.  The value reflects the approximate break-even point for
559 >     * using tree-based operations.
560       */
561 <    private static final int SIGN_BIT = 0x80000000;
561 >    private static final int TREE_THRESHOLD = 8;
562 >
563 >    /*
564 >     * Encodings for special uses of Node hash fields. See above for
565 >     * explanation.
566 >     */
567 >    static final int MOVED     = 0x80000000; // hash field for forwarding nodes
568 >    static final int LOCKED    = 0x40000000; // set/tested only as a bit
569 >    static final int WAITING   = 0xc0000000; // both bits set/tested together
570 >    static final int HASH_BITS = 0x3fffffff; // usable bits of normal node hash
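
For orientation only (this is not code from the class), the encodings above are read from a Node hash field h as follows:

    static boolean isForwarding(int h) { return h == MOVED; }               // top bits 10, low bits zero
    static boolean isLocked(int h)     { return (h & LOCKED) != 0; }        // tested as a single bit
    static boolean hasWaiter(int h)    { return (h & WAITING) == WAITING; } // both top bits set
    static int     keyHashBits(int h)  { return h & HASH_BITS; }            // usable hash of a normal node
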
571  
572      /* ---------------- Fields -------------- */
573  
# Line 322 | Line 577 | public class ConcurrentHashMapV8<K, V>
577       */
578      transient volatile Node[] table;
579  
580 <    /** The counter maintaining number of elements. */
580 >    /**
581 >     * The counter maintaining number of elements.
582 >     */
583      private transient final LongAdder counter;
584 <    /** Nonzero when table is being initialized or resized. Updated via CAS. */
585 <    private transient volatile int resizing;
586 <    /** The next element count value upon which to resize the table. */
587 <    private transient int threshold;
588 <    /** The target capacity; volatile to cover initialization races. */
589 <    private transient volatile int targetCapacity;
584 >
585 >    /**
586 >     * Table initialization and resizing control.  When negative, the
587 >     * table is being initialized or resized. Otherwise, when table is
588 >     * null, holds the initial table size to use upon creation, or 0
589 >     * for default. After initialization, holds the next element count
590 >     * value upon which to resize the table.
591 >     */
592 >    private transient volatile int sizeCtl;
593  
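
Read purely as a restatement of the sizeCtl encoding documented above (not code from this class):

    static String describeSizeCtl(int sc, boolean tableCreated) {
        if (sc < 0)        return "table is being initialized or resized";
        if (!tableCreated) return (sc == 0) ? "create with default capacity"
                                            : "initial table size to use: " + sc;
        return "next element count that triggers a resize: " + sc;
    }
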
594      // views
595 <    private transient KeySet<K,V> keySet;
596 <    private transient Values<K,V> values;
597 <    private transient EntrySet<K,V> entrySet;
595 >    private transient KeySetView<K,V> keySet;
596 >    private transient ValuesView<K,V> values;
597 >    private transient EntrySetView<K,V> entrySet;
598  
599      /** For serialization compatibility. Null unless serialized; see below */
600      private Segment<K,V>[] segments;
# Line 353 | Line 613 | public class ConcurrentHashMapV8<K, V>
613       * inline assignments below.
614       */
615  
616 <    static final Node tabAt(Node[] tab, int i) { // used by InternalIterator
616 >    static final Node tabAt(Node[] tab, int i) { // used by Iter
617          return (Node)UNSAFE.getObjectVolatile(tab, ((long)i<<ASHIFT)+ABASE);
618      }
619  
# Line 365 | Line 625 | public class ConcurrentHashMapV8<K, V>
625          UNSAFE.putObjectVolatile(tab, ((long)i<<ASHIFT)+ABASE, v);
626      }
627  
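
tabAt, its compare-and-set sibling (elided by the hunk above), and setTabAt use sun.misc.Unsafe only to obtain volatile-read, CAS, and volatile-write semantics on individual table slots without an extra indirection. As a rough functional analogy, not what this class does, AtomicReferenceArray offers the same three operations:

    java.util.concurrent.atomic.AtomicReferenceArray<Object> bins =
        new java.util.concurrent.atomic.AtomicReferenceArray<Object>(16);
    int i = 3;
    Object head  = bins.get(i);                        // ~ tabAt(tab, i): volatile read
    boolean cas  = bins.compareAndSet(i, null, head);  // ~ the CAS used to install a first node
    bins.set(i, head);                                 // ~ setTabAt(tab, i, v): volatile write
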
628 <    /* ----------------Table Initialization and Resizing -------------- */
628 >    /* ---------------- Nodes -------------- */
629  
630      /**
631 <     * Returns a power of two table size for the given desired capacity.
632 <     * See Hackers Delight, sec 3.2
631 >     * Key-value entry. Note that this is never exported out as a
632 >     * user-visible Map.Entry (see MapEntry below). Nodes with a hash
633 >     * field of MOVED are special, and do not contain user keys or
634 >     * values.  Otherwise, keys are never null, and null val fields
635 >     * indicate that a node is in the process of being deleted or
636 >     * created. For purposes of read-only access, a key may be read
637 >     * before a val, but can only be used after checking val to be
638 >     * non-null.
639       */
640 <    private static final int tableSizeFor(int c) {
641 <        int n = c - 1;
642 <        n |= n >>> 1;
643 <        n |= n >>> 2;
644 <        n |= n >>> 4;
379 <        n |= n >>> 8;
380 <        n |= n >>> 16;
381 <        return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
382 <    }
640 >    static class Node {
641 >        volatile int hash;
642 >        final Object key;
643 >        volatile Object val;
644 >        volatile Node next;
645  
646 <    /**
647 <     * If not already resizing, initializes or creates next table and
648 <     * transfers bins. Initial table size uses the capacity recorded
649 <     * in targetCapacity.  Rechecks occupancy after a transfer to see
650 <     * if another resize is already needed because resizings are
651 <     * lagging additions.
652 <     *
653 <     * @return current table
654 <     */
655 <    private final Node[] growTable() {
656 <        if (resizing == 0 &&
657 <            UNSAFE.compareAndSwapInt(this, resizingOffset, 0, 1)) {
658 <            try {
659 <                for (;;) {
660 <                    Node[] tab = table;
661 <                    int n, c, m;
662 <                    if (tab == null)
663 <                        n = (c = targetCapacity) > 0 ? c : DEFAULT_CAPACITY;
664 <                    else if ((m = tab.length) < MAXIMUM_CAPACITY &&
665 <                             counter.sum() >= (long)threshold)
666 <                        n = m << 1;
667 <                    else
668 <                        break;
669 <                    threshold = n - (n >>> 2) - THRESHOLD_OFFSET;
670 <                    Node[] nextTab = new Node[n];
671 <                    if (tab != null)
672 <                        transfer(tab, nextTab,
673 <                                 new Node(SIGN_BIT, nextTab, null, null));
674 <                    table = nextTab;
675 <                    if (tab == null)
646 >        Node(int hash, Object key, Object val, Node next) {
647 >            this.hash = hash;
648 >            this.key = key;
649 >            this.val = val;
650 >            this.next = next;
651 >        }
652 >
653 >        /** CompareAndSet the hash field */
654 >        final boolean casHash(int cmp, int val) {
655 >            return UNSAFE.compareAndSwapInt(this, hashOffset, cmp, val);
656 >        }
657 >
658 >        /** The number of spins before blocking for a lock */
659 >        static final int MAX_SPINS =
660 >            Runtime.getRuntime().availableProcessors() > 1 ? 64 : 1;
661 >
662 >        /**
663 >         * Spins a while if LOCKED bit set and this node is the first
664 >         * of its bin, and then sets WAITING bits on hash field and
665 >         * blocks (once) if they are still set.  It is OK for this
666 >         * method to return even if lock is not available upon exit,
667 >         * which enables these simple single-wait mechanics.
668 >         *
669 >         * The corresponding signalling operation is performed within
670 >         * callers: Upon detecting that WAITING has been set when
671 >         * unlocking lock (via a failed CAS from non-waiting LOCKED
672 >         * state), unlockers acquire the sync lock and perform a
673 >         * notifyAll.
674 >         *
675 >         * The initial sanity check on tab and bounds is not currently
676 >         * necessary in the only usages of this method, but enables
677 >         * use in other future contexts.
678 >         */
679 >        final void tryAwaitLock(Node[] tab, int i) {
680 >            if (tab != null && i >= 0 && i < tab.length) { // sanity check
681 >                int r = ThreadLocalRandom.current().nextInt(); // randomize spins
682 >                int spins = MAX_SPINS, h;
683 >                while (tabAt(tab, i) == this && ((h = hash) & LOCKED) != 0) {
684 >                    if (spins >= 0) {
685 >                        r ^= r << 1; r ^= r >>> 3; r ^= r << 10; // xorshift
686 >                        if (r >= 0 && --spins == 0)
687 >                            Thread.yield();  // yield before block
688 >                    }
689 >                    else if (casHash(h, h | WAITING)) {
690 >                        synchronized (this) {
691 >                            if (tabAt(tab, i) == this &&
692 >                                (hash & WAITING) == WAITING) {
693 >                                try {
694 >                                    wait();
695 >                                } catch (InterruptedException ie) {
696 >                                    try {
697 >                                        Thread.currentThread().interrupt();
698 >                                    } catch (SecurityException ignore) {
699 >                                    }
700 >                                }
701 >                            }
702 >                            else
703 >                                notifyAll(); // possibly won race vs signaller
704 >                        }
705                          break;
706 +                    }
707                  }
416            } finally {
417                resizing = 0;
708              }
709          }
710 <        else if (table == null)
711 <            Thread.yield(); // lost initialization race; just spin
712 <        return table;
710 >
711 >        // Unsafe mechanics for casHash
712 >        private static final sun.misc.Unsafe UNSAFE;
713 >        private static final long hashOffset;
714 >
715 >        static {
716 >            try {
717 >                UNSAFE = getUnsafe();
718 >                Class<?> k = Node.class;
719 >                hashOffset = UNSAFE.objectFieldOffset
720 >                    (k.getDeclaredField("hash"));
721 >            } catch (Exception e) {
722 >                throw new Error(e);
723 >            }
724 >        }
725      }
726  
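
The spin loop in tryAwaitLock randomizes its back-off with a Marsaglia xorshift step before deciding whether to yield or block; isolated, the step looks like this (sketch only):

    int r = java.util.concurrent.ThreadLocalRandom.current().nextInt(); // seed, as in tryAwaitLock
    for (int i = 0; i < 4; ++i) {
        r ^= r << 1; r ^= r >>> 3; r ^= r << 10;  // same xorshift update as above
        // tryAwaitLock counts only draws with r >= 0 against its spin budget
    }
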
727 <    /*
728 <     * Reclassifies nodes in each bin to new table.  Because we are
729 <     * using power-of-two expansion, the elements from each bin must
730 <     * either stay at same index, or move with a power of two
429 <     * offset. We eliminate unnecessary node creation by catching
430 <     * cases where old nodes can be reused because their next fields
431 <     * won't change.  Statistically, only about one-sixth of them need
432 <     * cloning when a table doubles. The nodes they replace will be
433 <     * garbage collectable as soon as they are no longer referenced by
434 <     * any reader thread that may be in the midst of concurrently
435 <     * traversing table.
436 <     *
437 <     * Transfers are done from the bottom up to preserve iterator
438 <     * traversability. On each step, the old bin is locked,
439 <     * moved/copied, and then replaced with a forwarding node.
727 >    /* ---------------- TreeBins -------------- */
728 >
729 >    /**
730 >     * Nodes for use in TreeBins
731       */
732 <    private static final void transfer(Node[] tab, Node[] nextTab, Node fwd) {
733 <        int n = tab.length;
734 <        Node ignore = nextTab[n + n - 1]; // force bounds check
735 <        for (int i = n - 1; i >= 0; --i) {
736 <            for (Node e;;) {
737 <                if ((e = tabAt(tab, i)) != null) {
738 <                    boolean validated = false;
739 <                    synchronized (e) {
740 <                        if (tabAt(tab, i) == e) {
741 <                            validated = true;
742 <                            Node lo = null, hi = null, lastRun = e;
743 <                            int runBit = e.hash & n;
744 <                            for (Node p = e.next; p != null; p = p.next) {
745 <                                int b = p.hash & n;
746 <                                if (b != runBit) {
747 <                                    runBit = b;
748 <                                    lastRun = p;
732 >    static final class TreeNode extends Node {
733 >        TreeNode parent;  // red-black tree links
734 >        TreeNode left;
735 >        TreeNode right;
736 >        TreeNode prev;    // needed to unlink next upon deletion
737 >        boolean red;
738 >
739 >        TreeNode(int hash, Object key, Object val, Node next, TreeNode parent) {
740 >            super(hash, key, val, next);
741 >            this.parent = parent;
742 >        }
743 >    }
744 >
745 >    /**
746 >     * A specialized form of red-black tree for use in bins
747 >     * whose size exceeds a threshold.
748 >     *
749 >     * TreeBins use a special form of comparison for search and
750 >     * related operations (which is the main reason we cannot use
751 >     * existing collections such as TreeMaps). TreeBins contain
752 >     * Comparable elements, but may contain others, as well as
753 >     * elements that are Comparable but not necessarily Comparable<T>
754 >     * for the same T, so we cannot invoke compareTo among them. To
755 >     * handle this, the tree is ordered primarily by hash value, then
756 >     * by getClass().getName() order, and then by Comparator order
757 >     * among elements of the same class.  On lookup at a node, if
758 >     * elements are not comparable or compare as 0, both left and
759 >     * right children may need to be searched in the case of tied hash
760 >     * values. (This corresponds to the full list search that would be
761 >     * necessary if all elements were non-Comparable and had tied
762 >     * hashes.)  The red-black balancing code is updated from
763 >     * pre-jdk-collections
764 >     * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
765 >     * based in turn on Cormen, Leiserson, and Rivest "Introduction to
766 >     * Algorithms" (CLR).
767 >     *
768 >     * TreeBins also maintain a locking discipline separate from that of
769 >     * regular bins. Because they are forwarded via special MOVED
770 >     * nodes at bin heads (which can never change once established),
771 >     * we cannot use those nodes as locks. Instead, TreeBin
772 >     * extends AbstractQueuedSynchronizer to support a simple form of
773 >     * read-write lock. For update operations and table validation,
774 >     * the exclusive form of lock behaves in the same way as bin-head
775 >     * locks. However, lookups use shared read-lock mechanics to allow
776 >     * multiple readers in the absence of writers.  Additionally,
777 >     * these lookups do not ever block: While the lock is not
778 >     * available, they proceed along the slow traversal path (via
779 >     * next-pointers) until the lock becomes available or the list is
780 >     * exhausted, whichever comes first. (These cases are not fast,
781 >     * but maximize aggregate expected throughput.)  The AQS mechanics
782 >     * for doing this are straightforward.  The lock state is held as
783 >     * AQS getState().  Read counts are negative; the write count (1)
784 >     * is positive.  There are no signalling preferences among readers
785 >     * and writers. Since we don't need to export full Lock API, we
786 >     * just override the minimal AQS methods and use them directly.
787 >     */
788 >    static final class TreeBin extends AbstractQueuedSynchronizer {
789 >        private static final long serialVersionUID = 2249069246763182397L;
790 >        transient TreeNode root;  // root of tree
791 >        transient TreeNode first; // head of next-pointer list
792 >
793 >        /* AQS overrides */
794 >        public final boolean isHeldExclusively() { return getState() > 0; }
795 >        public final boolean tryAcquire(int ignore) {
796 >            if (compareAndSetState(0, 1)) {
797 >                setExclusiveOwnerThread(Thread.currentThread());
798 >                return true;
799 >            }
800 >            return false;
801 >        }
802 >        public final boolean tryRelease(int ignore) {
803 >            setExclusiveOwnerThread(null);
804 >            setState(0);
805 >            return true;
806 >        }
807 >        public final int tryAcquireShared(int ignore) {
808 >            for (int c;;) {
809 >                if ((c = getState()) > 0)
810 >                    return -1;
811 >                if (compareAndSetState(c, c - 1))
812 >                    return 1;
813 >            }
814 >        }
815 >        public final boolean tryReleaseShared(int ignore) {
816 >            int c;
817 >            do {} while (!compareAndSetState(c = getState(), c + 1));
818 >            return c == -1;
819 >        }
820 >
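
A sketch, not taken from this class, of how the overrides above are meant to be driven through the inherited AQS template methods; the int argument is ignored, matching the "int ignore" parameters, and getValue() below shows the opportunistic, non-blocking shared-read path that these methods back up:

    static void updateExclusively(TreeBin t, Runnable change) {
        t.acquire(0);                // blocks in tryAcquire until state becomes 1
        try { change.run(); }
        finally { t.release(0); }    // tryRelease resets state to 0
    }

    static void readBlocking(TreeBin t, Runnable read) {
        t.acquireShared(0);          // state goes negative: one more reader
        try { read.run(); }
        finally { t.releaseShared(0); }
    }
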
821 >        /** From CLR */
822 >        private void rotateLeft(TreeNode p) {
823 >            if (p != null) {
824 >                TreeNode r = p.right, pp, rl;
825 >                if ((rl = p.right = r.left) != null)
826 >                    rl.parent = p;
827 >                if ((pp = r.parent = p.parent) == null)
828 >                    root = r;
829 >                else if (pp.left == p)
830 >                    pp.left = r;
831 >                else
832 >                    pp.right = r;
833 >                r.left = p;
834 >                p.parent = r;
835 >            }
836 >        }
837 >
838 >        /** From CLR */
839 >        private void rotateRight(TreeNode p) {
840 >            if (p != null) {
841 >                TreeNode l = p.left, pp, lr;
842 >                if ((lr = p.left = l.right) != null)
843 >                    lr.parent = p;
844 >                if ((pp = l.parent = p.parent) == null)
845 >                    root = l;
846 >                else if (pp.right == p)
847 >                    pp.right = l;
848 >                else
849 >                    pp.left = l;
850 >                l.right = p;
851 >                p.parent = l;
852 >            }
853 >        }
854 >
855 >        /**
856 >         * Returns the TreeNode (or null if not found) for the given key
857 >         * starting at given root.
858 >         */
859 >        @SuppressWarnings("unchecked") final TreeNode getTreeNode
860 >            (int h, Object k, TreeNode p) {
861 >            Class<?> c = k.getClass();
862 >            while (p != null) {
863 >                int dir, ph;  Object pk; Class<?> pc;
864 >                if ((ph = p.hash) == h) {
865 >                    if ((pk = p.key) == k || k.equals(pk))
866 >                        return p;
867 >                    if (c != (pc = pk.getClass()) ||
868 >                        !(k instanceof Comparable) ||
869 >                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
870 >                        if ((dir = (c == pc) ? 0 :
871 >                             c.getName().compareTo(pc.getName())) == 0) {
872 >                            TreeNode r = null, pl, pr; // check both sides
873 >                            if ((pr = p.right) != null && h >= pr.hash &&
874 >                                (r = getTreeNode(h, k, pr)) != null)
875 >                                return r;
876 >                            else if ((pl = p.left) != null && h <= pl.hash)
877 >                                dir = -1;
878 >                            else // nothing there
879 >                                return null;
880 >                        }
881 >                    }
882 >                }
883 >                else
884 >                    dir = (h < ph) ? -1 : 1;
885 >                p = (dir > 0) ? p.right : p.left;
886 >            }
887 >            return null;
888 >        }
889 >
890 >        /**
891 >         * Wrapper for getTreeNode used by CHM.get. Tries to obtain
892 >         * the read lock to call getTreeNode, but if the lock cannot be
893 >         * acquired, searches along next links.
894 >         */
895 >        final Object getValue(int h, Object k) {
896 >            Node r = null;
897 >            int c = getState(); // Must read lock state first
898 >            for (Node e = first; e != null; e = e.next) {
899 >                if (c <= 0 && compareAndSetState(c, c - 1)) {
900 >                    try {
901 >                        r = getTreeNode(h, k, root);
902 >                    } finally {
903 >                        releaseShared(0);
904 >                    }
905 >                    break;
906 >                }
907 >                else if ((e.hash & HASH_BITS) == h && k.equals(e.key)) {
908 >                    r = e;
909 >                    break;
910 >                }
911 >                else
912 >                    c = getState();
913 >            }
914 >            return r == null ? null : r.val;
915 >        }
916 >
917 >        /**
918 >         * Finds or adds a node.
919 >         * @return null if added
920 >         */
921 >        @SuppressWarnings("unchecked") final TreeNode putTreeNode
922 >            (int h, Object k, Object v) {
923 >            Class<?> c = k.getClass();
924 >            TreeNode pp = root, p = null;
925 >            int dir = 0;
926 >            while (pp != null) { // find existing node or leaf to insert at
927 >                int ph;  Object pk; Class<?> pc;
928 >                p = pp;
929 >                if ((ph = p.hash) == h) {
930 >                    if ((pk = p.key) == k || k.equals(pk))
931 >                        return p;
932 >                    if (c != (pc = pk.getClass()) ||
933 >                        !(k instanceof Comparable) ||
934 >                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
935 >                        TreeNode s = null, r = null, pr;
936 >                        if ((dir = (c == pc) ? 0 :
937 >                             c.getName().compareTo(pc.getName())) == 0) {
938 >                            if ((pr = p.right) != null && h >= pr.hash &&
939 >                                (r = getTreeNode(h, k, pr)) != null)
940 >                                return r;
941 >                            else // continue left
942 >                                dir = -1;
943 >                        }
944 >                        else if ((pr = p.right) != null && h >= pr.hash)
945 >                            s = pr;
946 >                        if (s != null && (r = getTreeNode(h, k, s)) != null)
947 >                            return r;
948 >                    }
949 >                }
950 >                else
951 >                    dir = (h < ph) ? -1 : 1;
952 >                pp = (dir > 0) ? p.right : p.left;
953 >            }
954 >
955 >            TreeNode f = first;
956 >            TreeNode x = first = new TreeNode(h, k, v, f, p);
957 >            if (p == null)
958 >                root = x;
959 >            else { // attach and rebalance; adapted from CLR
960 >                TreeNode xp, xpp;
961 >                if (f != null)
962 >                    f.prev = x;
963 >                if (dir <= 0)
964 >                    p.left = x;
965 >                else
966 >                    p.right = x;
967 >                x.red = true;
968 >                while (x != null && (xp = x.parent) != null && xp.red &&
969 >                       (xpp = xp.parent) != null) {
970 >                    TreeNode xppl = xpp.left;
971 >                    if (xp == xppl) {
972 >                        TreeNode y = xpp.right;
973 >                        if (y != null && y.red) {
974 >                            y.red = false;
975 >                            xp.red = false;
976 >                            xpp.red = true;
977 >                            x = xpp;
978 >                        }
979 >                        else {
980 >                            if (x == xp.right) {
981 >                                rotateLeft(x = xp);
982 >                                xpp = (xp = x.parent) == null ? null : xp.parent;
983 >                            }
984 >                            if (xp != null) {
985 >                                xp.red = false;
986 >                                if (xpp != null) {
987 >                                    xpp.red = true;
988 >                                    rotateRight(xpp);
989                                  }
990                              }
991 <                            if (runBit == 0)
992 <                                lo = lastRun;
993 <                            else
994 <                                hi = lastRun;
995 <                            for (Node p = e; p != lastRun; p = p.next) {
996 <                                int ph = p.hash;
997 <                                Object pk = p.key, pv = p.val;
998 <                                if ((ph & n) == 0)
999 <                                    lo = new Node(ph, pk, pv, lo);
1000 <                                else
1001 <                                    hi = new Node(ph, pk, pv, hi);
991 >                        }
992 >                    }
993 >                    else {
994 >                        TreeNode y = xppl;
995 >                        if (y != null && y.red) {
996 >                            y.red = false;
997 >                            xp.red = false;
998 >                            xpp.red = true;
999 >                            x = xpp;
1000 >                        }
1001 >                        else {
1002 >                            if (x == xp.left) {
1003 >                                rotateRight(x = xp);
1004 >                                xpp = (xp = x.parent) == null ? null : xp.parent;
1005 >                            }
1006 >                            if (xp != null) {
1007 >                                xp.red = false;
1008 >                                if (xpp != null) {
1009 >                                    xpp.red = true;
1010 >                                    rotateLeft(xpp);
1011 >                                }
1012                              }
472                            setTabAt(nextTab, i, lo);
473                            setTabAt(nextTab, i + n, hi);
474                            setTabAt(tab, i, fwd);
1013                          }
1014                      }
1015 <                    if (validated)
1015 >                }
1016 >                TreeNode r = root;
1017 >                if (r != null && r.red)
1018 >                    r.red = false;
1019 >            }
1020 >            return null;
1021 >        }
1022 >
1023 >        /**
1024 >         * Removes the given node, which must be present before this
1025 >         * call.  This is messier than typical red-black deletion code
1026 >         * because we cannot swap the contents of an interior node
1027 >         * with a leaf successor that is pinned by "next" pointers
1028 >         * that are accessible independently of the lock. So instead we
1029 >         * swap the tree linkages.
1030 >         */
1031 >        final void deleteTreeNode(TreeNode p) {
1032 >            TreeNode next = (TreeNode)p.next; // unlink traversal pointers
1033 >            TreeNode pred = p.prev;
1034 >            if (pred == null)
1035 >                first = next;
1036 >            else
1037 >                pred.next = next;
1038 >            if (next != null)
1039 >                next.prev = pred;
1040 >            TreeNode replacement;
1041 >            TreeNode pl = p.left;
1042 >            TreeNode pr = p.right;
1043 >            if (pl != null && pr != null) {
1044 >                TreeNode s = pr, sl;
1045 >                while ((sl = s.left) != null) // find successor
1046 >                    s = sl;
1047 >                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
1048 >                TreeNode sr = s.right;
1049 >                TreeNode pp = p.parent;
1050 >                if (s == pr) { // p was s's direct parent
1051 >                    p.parent = s;
1052 >                    s.right = p;
1053 >                }
1054 >                else {
1055 >                    TreeNode sp = s.parent;
1056 >                    if ((p.parent = sp) != null) {
1057 >                        if (s == sp.left)
1058 >                            sp.left = p;
1059 >                        else
1060 >                            sp.right = p;
1061 >                    }
1062 >                    if ((s.right = pr) != null)
1063 >                        pr.parent = s;
1064 >                }
1065 >                p.left = null;
1066 >                if ((p.right = sr) != null)
1067 >                    sr.parent = p;
1068 >                if ((s.left = pl) != null)
1069 >                    pl.parent = s;
1070 >                if ((s.parent = pp) == null)
1071 >                    root = s;
1072 >                else if (p == pp.left)
1073 >                    pp.left = s;
1074 >                else
1075 >                    pp.right = s;
1076 >                replacement = sr;
1077 >            }
1078 >            else
1079 >                replacement = (pl != null) ? pl : pr;
1080 >            TreeNode pp = p.parent;
1081 >            if (replacement == null) {
1082 >                if (pp == null) {
1083 >                    root = null;
1084 >                    return;
1085 >                }
1086 >                replacement = p;
1087 >            }
1088 >            else {
1089 >                replacement.parent = pp;
1090 >                if (pp == null)
1091 >                    root = replacement;
1092 >                else if (p == pp.left)
1093 >                    pp.left = replacement;
1094 >                else
1095 >                    pp.right = replacement;
1096 >                p.left = p.right = p.parent = null;
1097 >            }
1098 >            if (!p.red) { // rebalance, from CLR
1099 >                TreeNode x = replacement;
1100 >                while (x != null) {
1101 >                    TreeNode xp, xpl;
1102 >                    if (x.red || (xp = x.parent) == null) {
1103 >                        x.red = false;
1104                          break;
1105 +                    }
1106 +                    if (x == (xpl = xp.left)) {
1107 +                        TreeNode sib = xp.right;
1108 +                        if (sib != null && sib.red) {
1109 +                            sib.red = false;
1110 +                            xp.red = true;
1111 +                            rotateLeft(xp);
1112 +                            sib = (xp = x.parent) == null ? null : xp.right;
1113 +                        }
1114 +                        if (sib == null)
1115 +                            x = xp;
1116 +                        else {
1117 +                            TreeNode sl = sib.left, sr = sib.right;
1118 +                            if ((sr == null || !sr.red) &&
1119 +                                (sl == null || !sl.red)) {
1120 +                                sib.red = true;
1121 +                                x = xp;
1122 +                            }
1123 +                            else {
1124 +                                if (sr == null || !sr.red) {
1125 +                                    if (sl != null)
1126 +                                        sl.red = false;
1127 +                                    sib.red = true;
1128 +                                    rotateRight(sib);
1129 +                                    sib = (xp = x.parent) == null ? null : xp.right;
1130 +                                }
1131 +                                if (sib != null) {
1132 +                                    sib.red = (xp == null) ? false : xp.red;
1133 +                                    if ((sr = sib.right) != null)
1134 +                                        sr.red = false;
1135 +                                }
1136 +                                if (xp != null) {
1137 +                                    xp.red = false;
1138 +                                    rotateLeft(xp);
1139 +                                }
1140 +                                x = root;
1141 +                            }
1142 +                        }
1143 +                    }
1144 +                    else { // symmetric
1145 +                        TreeNode sib = xpl;
1146 +                        if (sib != null && sib.red) {
1147 +                            sib.red = false;
1148 +                            xp.red = true;
1149 +                            rotateRight(xp);
1150 +                            sib = (xp = x.parent) == null ? null : xp.left;
1151 +                        }
1152 +                        if (sib == null)
1153 +                            x = xp;
1154 +                        else {
1155 +                            TreeNode sl = sib.left, sr = sib.right;
1156 +                            if ((sl == null || !sl.red) &&
1157 +                                (sr == null || !sr.red)) {
1158 +                                sib.red = true;
1159 +                                x = xp;
1160 +                            }
1161 +                            else {
1162 +                                if (sl == null || !sl.red) {
1163 +                                    if (sr != null)
1164 +                                        sr.red = false;
1165 +                                    sib.red = true;
1166 +                                    rotateLeft(sib);
1167 +                                    sib = (xp = x.parent) == null ? null : xp.left;
1168 +                                }
1169 +                                if (sib != null) {
1170 +                                    sib.red = (xp == null) ? false : xp.red;
1171 +                                    if ((sl = sib.left) != null)
1172 +                                        sl.red = false;
1173 +                                }
1174 +                                if (xp != null) {
1175 +                                    xp.red = false;
1176 +                                    rotateRight(xp);
1177 +                                }
1178 +                                x = root;
1179 +                            }
1180 +                        }
1181 +                    }
1182                  }
1183 <                else if (casTabAt(tab, i, e, fwd))
1184 <                    break;
1183 >            }
1184 >            if (p == replacement && (pp = p.parent) != null) {
1185 >                if (p == pp.left) // detach pointers
1186 >                    pp.left = null;
1187 >                else if (p == pp.right)
1188 >                    pp.right = null;
1189 >                p.parent = null;
1190              }
1191          }
1192      }
1193  
1194 <    /* ---------------- Internal access and update methods -------------- */
1194 >    /* ---------------- Collision reduction methods -------------- */
1195  
1196      /**
1197 <     * Applies a supplemental hash function to a given hashCode, which
1198 <     * defends against poor quality hash functions.  The result must
1199 <     * be non-negative, and for reasonable performance must have good
1200 <     * avalanche properties; i.e., that each bit of the argument
1201 <     * affects each bit (except sign bit) of the result.
1197 >     * Spreads higher bits to lower, and also forces top 2 bits to 0.
1198 >     * Because the table uses power-of-two masking, sets of hashes
1199 >     * that vary only in bits above the current mask will always
1200 >     * collide. (Among known examples are sets of Float keys holding
1201 >     * consecutive whole numbers in small tables.)  To counter this,
1202 >     * we apply a transform that spreads the impact of higher bits
1203 >     * downward. There is a tradeoff between speed, utility, and
1204 >     * quality of bit-spreading. Because many common sets of hashes
1205 >     * are already reasonably distributed across bits (so don't benefit
1206 >     * from spreading), and because we use trees to handle large sets
1207 >     * of collisions in bins, we don't need excessively high quality.
1208       */
1209      private static final int spread(int h) {
1210 <        // Apply base step of MurmurHash; see http://code.google.com/p/smhasher/
1211 <        h ^= h >>> 16;
498 <        h *= 0x85ebca6b;
499 <        h ^= h >>> 13;
500 <        h *= 0xc2b2ae35;
501 <        return (h >>> 16) ^ (h & 0x7fffffff); // mask out sign bit
1210 >        h ^= (h >>> 18) ^ (h >>> 12);
1211 >        return (h ^ (h >>> 10)) & HASH_BITS;
1212      }
1213  
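    /*
     * [Editorial illustration; not part of the original file.] A small
     * sketch of the Float example mentioned above: consecutive
     * whole-number Float keys differ only in their high-order hash bits,
     * so without spreading they would all index bin 0 of a small table.
     * The method name spreadDemo is invented for this note only.
     */
    static void spreadDemo() {
        int mask = 16 - 1;                            // a 16-bin table
        for (float x = 1.0f; x <= 4.0f; x += 1.0f) {
            int h = Float.valueOf(x).hashCode();      // varies only in high bits
            System.out.println(x +
                " raw bin="    + (h & mask) +         // 0 for every key
                " spread bin=" + (spread(h) & mask)); // distinct indices
        }
    }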
1214 +    /**
1215 +     * Replaces a list bin with a tree bin. Call only when locked.
1216 +     * Fails to replace if the given key is non-comparable or the
1217 +     * table is being, or needs to be, resized.
1218 +     */
1219 +    private final void replaceWithTreeBin(Node[] tab, int index, Object key) {
1220 +        if ((key instanceof Comparable) &&
1221 +            (tab.length >= MAXIMUM_CAPACITY || counter.sum() < (long)sizeCtl)) {
1222 +            TreeBin t = new TreeBin();
1223 +            for (Node e = tabAt(tab, index); e != null; e = e.next)
1224 +                t.putTreeNode(e.hash & HASH_BITS, e.key, e.val);
1225 +            setTabAt(tab, index, new Node(MOVED, t, null, null));
1226 +        }
1227 +    }
1228 +
1229 +    /* ---------------- Internal access and update methods -------------- */
1230 +
1231      /** Implementation for get and containsKey */
1232      private final Object internalGet(Object k) {
1233          int h = spread(k.hashCode());
1234          retry: for (Node[] tab = table; tab != null;) {
1235 <            Node e; Object ek, ev; int eh;  // locals to read fields once
1235 >            Node e, p; Object ek, ev; int eh;      // locals to read fields once
1236              for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1237 <                if ((eh = e.hash) == h) {
1238 <                    if ((ev = e.val) != null &&
1239 <                        ((ek = e.key) == k || k.equals(ek)))
1240 <                        return ev;
1241 <                }
1242 <                else if (eh < 0) {          // sign bit set
1243 <                    tab = (Node[])e.key;    // bin was moved during resize
517 <                    continue retry;
1237 >                if ((eh = e.hash) == MOVED) {
1238 >                    if ((ek = e.key) instanceof TreeBin)  // search TreeBin
1239 >                        return ((TreeBin)ek).getValue(h, k);
1240 >                    else {                        // restart with new table
1241 >                        tab = (Node[])ek;
1242 >                        continue retry;
1243 >                    }
1244                  }
1245 +                else if ((eh & HASH_BITS) == h && (ev = e.val) != null &&
1246 +                         ((ek = e.key) == k || k.equals(ek)))
1247 +                    return ev;
1248              }
1249              break;
1250          }
1251          return null;
1252      }
1253  
1254 <    /** Implementation for put and putIfAbsent */
1255 <    private final Object internalPut(Object k, Object v, boolean replace) {
1254 >    /**
1255 >     * Implementation for the four public remove/replace methods:
1256 >     * Replaces node value with v, conditional upon match of cv if
1257 >     * non-null.  If resulting value is null, delete.
1258 >     */
1259 >    private final Object internalReplace(Object k, Object v, Object cv) {
1260          int h = spread(k.hashCode());
1261 <        Object oldVal = null;               // previous value or null if none
1261 >        Object oldVal = null;
1262          for (Node[] tab = table;;) {
1263 <            Node e; int i; Object ek, ev;
1263 >            Node f; int i, fh; Object fk;
1264 >            if (tab == null ||
1265 >                (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1266 >                break;
1267 >            else if ((fh = f.hash) == MOVED) {
1268 >                if ((fk = f.key) instanceof TreeBin) {
1269 >                    TreeBin t = (TreeBin)fk;
1270 >                    boolean validated = false;
1271 >                    boolean deleted = false;
1272 >                    t.acquire(0);
1273 >                    try {
1274 >                        if (tabAt(tab, i) == f) {
1275 >                            validated = true;
1276 >                            TreeNode p = t.getTreeNode(h, k, t.root);
1277 >                            if (p != null) {
1278 >                                Object pv = p.val;
1279 >                                if (cv == null || cv == pv || cv.equals(pv)) {
1280 >                                    oldVal = pv;
1281 >                                    if ((p.val = v) == null) {
1282 >                                        deleted = true;
1283 >                                        t.deleteTreeNode(p);
1284 >                                    }
1285 >                                }
1286 >                            }
1287 >                        }
1288 >                    } finally {
1289 >                        t.release(0);
1290 >                    }
1291 >                    if (validated) {
1292 >                        if (deleted)
1293 >                            counter.add(-1L);
1294 >                        break;
1295 >                    }
1296 >                }
1297 >                else
1298 >                    tab = (Node[])fk;
1299 >            }
1300 >            else if ((fh & HASH_BITS) != h && f.next == null) // precheck
1301 >                break;                          // rules out possible existence
1302 >            else if ((fh & LOCKED) != 0) {
1303 >                checkForResize();               // try resizing if can't get lock
1304 >                f.tryAwaitLock(tab, i);
1305 >            }
1306 >            else if (f.casHash(fh, fh | LOCKED)) {
1307 >                boolean validated = false;
1308 >                boolean deleted = false;
1309 >                try {
1310 >                    if (tabAt(tab, i) == f) {
1311 >                        validated = true;
1312 >                        for (Node e = f, pred = null;;) {
1313 >                            Object ek, ev;
1314 >                            if ((e.hash & HASH_BITS) == h &&
1315 >                                ((ev = e.val) != null) &&
1316 >                                ((ek = e.key) == k || k.equals(ek))) {
1317 >                                if (cv == null || cv == ev || cv.equals(ev)) {
1318 >                                    oldVal = ev;
1319 >                                    if ((e.val = v) == null) {
1320 >                                        deleted = true;
1321 >                                        Node en = e.next;
1322 >                                        if (pred != null)
1323 >                                            pred.next = en;
1324 >                                        else
1325 >                                            setTabAt(tab, i, en);
1326 >                                    }
1327 >                                }
1328 >                                break;
1329 >                            }
1330 >                            pred = e;
1331 >                            if ((e = e.next) == null)
1332 >                                break;
1333 >                        }
1334 >                    }
1335 >                } finally {
1336 >                    if (!f.casHash(fh | LOCKED, fh)) {
1337 >                        f.hash = fh;
1338 >                        synchronized (f) { f.notifyAll(); };
1339 >                    }
1340 >                }
1341 >                if (validated) {
1342 >                    if (deleted)
1343 >                        counter.add(-1L);
1344 >                    break;
1345 >                }
1346 >            }
1347 >        }
1348 >        return oldVal;
1349 >    }
1350 >
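    /*
     * [Editorial illustration; not part of the original file.] Given the
     * javadoc above (v == null requests deletion; a non-null cv requests
     * a match against the current value), the four public remove/replace
     * operations presumably reduce to internalReplace roughly as follows.
     * This mapping is inferred from the javadoc, not quoted from the
     * public methods themselves:
     *
     *   remove(key)                  ~ internalReplace(key, null, null)
     *   remove(key, value)           ~ internalReplace(key, null, value) != null
     *   replace(key, value)          ~ internalReplace(key, value, null)
     *   replace(key, oldVal, newVal) ~ internalReplace(key, newVal, oldVal) != null
     */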
1351 >    /*
1352 >     * Internal versions of the six insertion methods, each a
1353 >     * little more complicated than the last. All have
1354 >     * the same basic structure as the first (internalPut):
1355 >     *  1. If table uninitialized, create
1356 >     *  2. If bin empty, try to CAS new node
1357 >     *  3. If bin stale, use new table
1358 >     *  4. if bin converted to TreeBin, validate and relay to TreeBin methods
1359 >     *  5. Lock and validate; if valid, scan and add or update
1360 >     *
1361 >     * The others interweave other checks and/or alternative actions:
1362 >     *  * Plain put checks for and performs resize after insertion.
1363 >     *  * putIfAbsent prescans for mapping without lock (and fails to add
1364 >     *    if present), which also makes pre-emptive resize checks worthwhile.
1365 >     *  * computeIfAbsent extends the form used in putIfAbsent with
1366 >     *    additional mechanics to handle calls to the mapping function,
1367 >     *    including potential exceptions and null returns.
1368 >     *  * compute uses the same function-call mechanics, but without
1369 >     *    the prescans.
1370 >     *  * merge acts as putIfAbsent in the absent case, but invokes the
1371 >     *    update function if present
1372 >     *  * putAll attempts to pre-allocate enough table space
1373 >     *    and more lazily performs count updates and checks.
1374 >     *
1375 >     * Someday when details settle down a bit more, it might be worth
1376 >     * some factoring to reduce sprawl.
1377 >     */
1378 >
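    /*
     * [Editorial illustration; not part of the original file.] The control
     * flow shared by the insertion methods below, reduced to a schematic;
     * it paraphrases internalPut rather than adding anything new (tree-bin
     * relay, count maintenance and resize checks are omitted):
     *
     *   for (Node[] tab = table;;) {
     *       if (tab == null)
     *           tab = initTable();                         // 1. create table
     *       else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
     *           if (casTabAt(tab, i, null, new Node(h, k, v, null)))
     *               break;                                 // 2. CAS into empty bin
     *       }
     *       else if ((fh = f.hash) == MOVED)
     *           tab = (Node[])f.key;                       // 3. stale bin: follow the
     *                                                      //    forwarding pointer, or
     *                                                      //    4. relay to a TreeBin
     *       else if (f.casHash(fh, fh | LOCKED)) {
     *           // 5. revalidate the bin head, then scan: update the value
     *           //    in place or append a new node; unlock and signal in
     *           //    a finally block, then break out of the loop.
     *       }
     *   }
     */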
1379 >    /** Implementation for put */
1380 >    private final Object internalPut(Object k, Object v) {
1381 >        int h = spread(k.hashCode());
1382 >        int count = 0;
1383 >        for (Node[] tab = table;;) {
1384 >            int i; Node f; int fh; Object fk;
1385              if (tab == null)
1386 <                tab = growTable();
1387 <            else if ((e = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1386 >                tab = initTable();
1387 >            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1388                  if (casTabAt(tab, i, null, new Node(h, k, v, null)))
1389                      break;                   // no lock when adding to empty bin
1390              }
1391 <            else if (e.hash < 0)             // resized -- restart with new table
1392 <                tab = (Node[])e.key;
1393 <            else if (!replace && e.hash == h && (ev = e.val) != null &&
1394 <                     ((ek = e.key) == k || k.equals(ek))) {
1395 <                if (tabAt(tab, i) == e) {    // inspect and validate 1st node
1396 <                    oldVal = ev;             // without lock for putIfAbsent
1397 <                    break;
1391 >            else if ((fh = f.hash) == MOVED) {
1392 >                if ((fk = f.key) instanceof TreeBin) {
1393 >                    TreeBin t = (TreeBin)fk;
1394 >                    Object oldVal = null;
1395 >                    t.acquire(0);
1396 >                    try {
1397 >                        if (tabAt(tab, i) == f) {
1398 >                            count = 2;
1399 >                            TreeNode p = t.putTreeNode(h, k, v);
1400 >                            if (p != null) {
1401 >                                oldVal = p.val;
1402 >                                p.val = v;
1403 >                            }
1404 >                        }
1405 >                    } finally {
1406 >                        t.release(0);
1407 >                    }
1408 >                    if (count != 0) {
1409 >                        if (oldVal != null)
1410 >                            return oldVal;
1411 >                        break;
1412 >                    }
1413                  }
1414 +                else
1415 +                    tab = (Node[])fk;
1416              }
1417 <            else {
1418 <                boolean validated = false;
1419 <                boolean checkSize = false;
1420 <                synchronized (e) {           // lock the 1st node of bin list
1421 <                    if (tabAt(tab, i) == e) {
1422 <                        validated = true;    // retry if 1st already deleted
1423 <                        for (Node first = e;;) {
1424 <                            if (e.hash == h &&
1425 <                                ((ek = e.key) == k || k.equals(ek)) &&
1426 <                                (ev = e.val) != null) {
1417 >            else if ((fh & LOCKED) != 0) {
1418 >                checkForResize();
1419 >                f.tryAwaitLock(tab, i);
1420 >            }
1421 >            else if (f.casHash(fh, fh | LOCKED)) {
1422 >                Object oldVal = null;
1423 >                try {                        // needed in case equals() throws
1424 >                    if (tabAt(tab, i) == f) {
1425 >                        count = 1;
1426 >                        for (Node e = f;; ++count) {
1427 >                            Object ek, ev;
1428 >                            if ((e.hash & HASH_BITS) == h &&
1429 >                                (ev = e.val) != null &&
1430 >                                ((ek = e.key) == k || k.equals(ek))) {
1431                                  oldVal = ev;
1432 <                                if (replace)
558 <                                    e.val = v;
1432 >                                e.val = v;
1433                                  break;
1434                              }
1435                              Node last = e;
1436                              if ((e = e.next) == null) {
1437                                  last.next = new Node(h, k, v, null);
1438 <                                if (last != first || tab.length <= 64)
1439 <                                    checkSize = true;
1438 >                                if (count >= TREE_THRESHOLD)
1439 >                                    replaceWithTreeBin(tab, i, k);
1440                                  break;
1441                              }
1442                          }
1443                      }
1444 +                } finally {                  // unlock and signal if needed
1445 +                    if (!f.casHash(fh | LOCKED, fh)) {
1446 +                        f.hash = fh;
1447 +                        synchronized (f) { f.notifyAll(); };
1448 +                    }
1449                  }
1450 <                if (validated) {
1451 <                    if (checkSize && tab.length < MAXIMUM_CAPACITY &&
1452 <                        resizing == 0 && counter.sum() >= (long)threshold)
1453 <                        growTable();
1450 >                if (count != 0) {
1451 >                    if (oldVal != null)
1452 >                        return oldVal;
1453 >                    if (tab.length <= 64)
1454 >                        count = 2;
1455                      break;
1456                  }
1457              }
1458          }
1459 <        if (oldVal == null)
1460 <            counter.increment();             // update counter outside of locks
1461 <        return oldVal;
1459 >        counter.add(1L);
1460 >        if (count > 1)
1461 >            checkForResize();
1462 >        return null;
1463      }
1464  
1465 <    /**
1466 <     * Implementation for the four public remove/replace methods:
586 <     * Replaces node value with v, conditional upon match of cv if
587 <     * non-null.  If resulting value is null, delete.
588 <     */
589 <    private final Object internalReplace(Object k, Object v, Object cv) {
1465 >    /** Implementation for putIfAbsent */
1466 >    private final Object internalPutIfAbsent(Object k, Object v) {
1467          int h = spread(k.hashCode());
1468 +        int count = 0;
1469          for (Node[] tab = table;;) {
1470 <            Node e; int i;
1471 <            if (tab == null ||
1472 <                (e = tabAt(tab, i = (tab.length - 1) & h)) == null)
1473 <                return null;
1474 <            else if (e.hash < 0)
1475 <                tab = (Node[])e.key;
1470 >            int i; Node f; int fh; Object fk, fv;
1471 >            if (tab == null)
1472 >                tab = initTable();
1473 >            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1474 >                if (casTabAt(tab, i, null, new Node(h, k, v, null)))
1475 >                    break;
1476 >            }
1477 >            else if ((fh = f.hash) == MOVED) {
1478 >                if ((fk = f.key) instanceof TreeBin) {
1479 >                    TreeBin t = (TreeBin)fk;
1480 >                    Object oldVal = null;
1481 >                    t.acquire(0);
1482 >                    try {
1483 >                        if (tabAt(tab, i) == f) {
1484 >                            count = 2;
1485 >                            TreeNode p = t.putTreeNode(h, k, v);
1486 >                            if (p != null)
1487 >                                oldVal = p.val;
1488 >                        }
1489 >                    } finally {
1490 >                        t.release(0);
1491 >                    }
1492 >                    if (count != 0) {
1493 >                        if (oldVal != null)
1494 >                            return oldVal;
1495 >                        break;
1496 >                    }
1497 >                }
1498 >                else
1499 >                    tab = (Node[])fk;
1500 >            }
1501 >            else if ((fh & HASH_BITS) == h && (fv = f.val) != null &&
1502 >                     ((fk = f.key) == k || k.equals(fk)))
1503 >                return fv;
1504              else {
1505 <                Object oldVal = null;
1506 <                boolean validated = false;
1507 <                boolean deleted = false;
1508 <                synchronized (e) {
1509 <                    if (tabAt(tab, i) == e) {
1510 <                        validated = true;
1511 <                        Node pred = null;
1512 <                        do {
1513 <                            Object ek, ev;
1514 <                            if (e.hash == h &&
1515 <                                ((ek = e.key) == k || k.equals(ek)) &&
1516 <                                ((ev = e.val) != null)) {
1517 <                                if (cv == null || cv == ev || cv.equals(ev)) {
1505 >                Node g = f.next;
1506 >                if (g != null) { // at least 2 nodes -- search and maybe resize
1507 >                    for (Node e = g;;) {
1508 >                        Object ek, ev;
1509 >                        if ((e.hash & HASH_BITS) == h && (ev = e.val) != null &&
1510 >                            ((ek = e.key) == k || k.equals(ek)))
1511 >                            return ev;
1512 >                        if ((e = e.next) == null) {
1513 >                            checkForResize();
1514 >                            break;
1515 >                        }
1516 >                    }
1517 >                }
1518 >                if (((fh = f.hash) & LOCKED) != 0) {
1519 >                    checkForResize();
1520 >                    f.tryAwaitLock(tab, i);
1521 >                }
1522 >                else if (tabAt(tab, i) == f && f.casHash(fh, fh | LOCKED)) {
1523 >                    Object oldVal = null;
1524 >                    try {
1525 >                        if (tabAt(tab, i) == f) {
1526 >                            count = 1;
1527 >                            for (Node e = f;; ++count) {
1528 >                                Object ek, ev;
1529 >                                if ((e.hash & HASH_BITS) == h &&
1530 >                                    (ev = e.val) != null &&
1531 >                                    ((ek = e.key) == k || k.equals(ek))) {
1532                                      oldVal = ev;
1533 <                                    if ((e.val = v) == null) {
1534 <                                        deleted = true;
1535 <                                        Node en = e.next;
1536 <                                        if (pred != null)
1537 <                                            pred.next = en;
1538 <                                        else
1539 <                                            setTabAt(tab, i, en);
1540 <                                    }
1533 >                                    break;
1534 >                                }
1535 >                                Node last = e;
1536 >                                if ((e = e.next) == null) {
1537 >                                    last.next = new Node(h, k, v, null);
1538 >                                    if (count >= TREE_THRESHOLD)
1539 >                                        replaceWithTreeBin(tab, i, k);
1540 >                                    break;
1541                                  }
622                                break;
1542                              }
1543 <                        } while ((e = (pred = e).next) != null);
1543 >                        }
1544 >                    } finally {
1545 >                        if (!f.casHash(fh | LOCKED, fh)) {
1546 >                            f.hash = fh;
1547 >                            synchronized (f) { f.notifyAll(); };
1548 >                        }
1549 >                    }
1550 >                    if (count != 0) {
1551 >                        if (oldVal != null)
1552 >                            return oldVal;
1553 >                        if (tab.length <= 64)
1554 >                            count = 2;
1555 >                        break;
1556                      }
626                }
627                if (validated) {
628                    if (deleted)
629                        counter.decrement();
630                    return oldVal;
1557                  }
1558              }
1559          }
1560 +        counter.add(1L);
1561 +        if (count > 1)
1562 +            checkForResize();
1563 +        return null;
1564      }
1565  
1566 <    /** Implementation for computeIfAbsent and compute. Like put, but messier. */
1567 <    @SuppressWarnings("unchecked")
1568 <    private final V internalCompute(K k,
639 <                                    MappingFunction<? super K, ? extends V> f,
640 <                                    boolean replace) {
1566 >    /** Implementation for computeIfAbsent */
1567 >    private final Object internalComputeIfAbsent(K k,
1568 >                                                 Fun<? super K, ?> mf) {
1569          int h = spread(k.hashCode());
1570 <        V val = null;
1571 <        boolean added = false;
1572 <        Node[] tab = table;
1573 <        outer:for (;;) {
646 <            Node e; int i; Object ek, ev;
1570 >        Object val = null;
1571 >        int count = 0;
1572 >        for (Node[] tab = table;;) {
1573 >            Node f; int i, fh; Object fk, fv;
1574              if (tab == null)
1575 <                tab = growTable();
1576 <            else if ((e = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1577 <                Node node = new Node(h, k, null, null);
1578 <                boolean validated = false;
1579 <                synchronized (node) {  // must lock while computing value
1580 <                    if (casTabAt(tab, i, null, node)) {
1581 <                        validated = true;
1582 <                        try {
1583 <                            val = f.map(k);
1584 <                            if (val != null) {
1585 <                                node.val = val;
1575 >                tab = initTable();
1576 >            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1577 >                Node node = new Node(fh = h | LOCKED, k, null, null);
1578 >                if (casTabAt(tab, i, null, node)) {
1579 >                    count = 1;
1580 >                    try {
1581 >                        if ((val = mf.apply(k)) != null)
1582 >                            node.val = val;
1583 >                    } finally {
1584 >                        if (val == null)
1585 >                            setTabAt(tab, i, null);
1586 >                        if (!node.casHash(fh, h)) {
1587 >                            node.hash = h;
1588 >                            synchronized (node) { node.notifyAll(); };
1589 >                        }
1590 >                    }
1591 >                }
1592 >                if (count != 0)
1593 >                    break;
1594 >            }
1595 >            else if ((fh = f.hash) == MOVED) {
1596 >                if ((fk = f.key) instanceof TreeBin) {
1597 >                    TreeBin t = (TreeBin)fk;
1598 >                    boolean added = false;
1599 >                    t.acquire(0);
1600 >                    try {
1601 >                        if (tabAt(tab, i) == f) {
1602 >                            count = 1;
1603 >                            TreeNode p = t.getTreeNode(h, k, t.root);
1604 >                            if (p != null)
1605 >                                val = p.val;
1606 >                            else if ((val = mf.apply(k)) != null) {
1607                                  added = true;
1608 +                                count = 2;
1609 +                                t.putTreeNode(h, k, val);
1610                              }
661                        } finally {
662                            if (!added)
663                                setTabAt(tab, i, null);
1611                          }
1612 +                    } finally {
1613 +                        t.release(0);
1614 +                    }
1615 +                    if (count != 0) {
1616 +                        if (!added)
1617 +                            return val;
1618 +                        break;
1619                      }
1620                  }
1621 <                if (validated)
1622 <                    break;
1621 >                else
1622 >                    tab = (Node[])fk;
1623              }
1624 <            else if (e.hash < 0)
1625 <                tab = (Node[])e.key;
1626 <            else if (!replace && e.hash == h && (ev = e.val) != null &&
1627 <                     ((ek = e.key) == k || k.equals(ek))) {
1628 <                if (tabAt(tab, i) == e) {
1629 <                    val = (V)ev;
1624 >            else if ((fh & HASH_BITS) == h && (fv = f.val) != null &&
1625 >                     ((fk = f.key) == k || k.equals(fk)))
1626 >                return fv;
1627 >            else {
1628 >                Node g = f.next;
1629 >                if (g != null) {
1630 >                    for (Node e = g;;) {
1631 >                        Object ek, ev;
1632 >                        if ((e.hash & HASH_BITS) == h && (ev = e.val) != null &&
1633 >                            ((ek = e.key) == k || k.equals(ek)))
1634 >                            return ev;
1635 >                        if ((e = e.next) == null) {
1636 >                            checkForResize();
1637 >                            break;
1638 >                        }
1639 >                    }
1640 >                }
1641 >                if (((fh = f.hash) & LOCKED) != 0) {
1642 >                    checkForResize();
1643 >                    f.tryAwaitLock(tab, i);
1644 >                }
1645 >                else if (tabAt(tab, i) == f && f.casHash(fh, fh | LOCKED)) {
1646 >                    boolean added = false;
1647 >                    try {
1648 >                        if (tabAt(tab, i) == f) {
1649 >                            count = 1;
1650 >                            for (Node e = f;; ++count) {
1651 >                                Object ek, ev;
1652 >                                if ((e.hash & HASH_BITS) == h &&
1653 >                                    (ev = e.val) != null &&
1654 >                                    ((ek = e.key) == k || k.equals(ek))) {
1655 >                                    val = ev;
1656 >                                    break;
1657 >                                }
1658 >                                Node last = e;
1659 >                                if ((e = e.next) == null) {
1660 >                                    if ((val = mf.apply(k)) != null) {
1661 >                                        added = true;
1662 >                                        last.next = new Node(h, k, val, null);
1663 >                                        if (count >= TREE_THRESHOLD)
1664 >                                            replaceWithTreeBin(tab, i, k);
1665 >                                    }
1666 >                                    break;
1667 >                                }
1668 >                            }
1669 >                        }
1670 >                    } finally {
1671 >                        if (!f.casHash(fh | LOCKED, fh)) {
1672 >                            f.hash = fh;
1673 >                            synchronized (f) { f.notifyAll(); };
1674 >                        }
1675 >                    }
1676 >                    if (count != 0) {
1677 >                        if (!added)
1678 >                            return val;
1679 >                        if (tab.length <= 64)
1680 >                            count = 2;
1681 >                        break;
1682 >                    }
1683 >                }
1684 >            }
1685 >        }
1686 >        if (val != null) {
1687 >            counter.add(1L);
1688 >            if (count > 1)
1689 >                checkForResize();
1690 >        }
1691 >        return val;
1692 >    }
1693 >
1694 >    /** Implementation for compute */
1695 >    @SuppressWarnings("unchecked") private final Object internalCompute
1696 >        (K k, boolean onlyIfPresent, BiFun<? super K, ? super V, ? extends V> mf) {
1697 >        int h = spread(k.hashCode());
1698 >        Object val = null;
1699 >        int delta = 0;
1700 >        int count = 0;
1701 >        for (Node[] tab = table;;) {
1702 >            Node f; int i, fh; Object fk;
1703 >            if (tab == null)
1704 >                tab = initTable();
1705 >            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1706 >                if (onlyIfPresent)
1707                      break;
1708 +                Node node = new Node(fh = h | LOCKED, k, null, null);
1709 +                if (casTabAt(tab, i, null, node)) {
1710 +                    try {
1711 +                        count = 1;
1712 +                        if ((val = mf.apply(k, null)) != null) {
1713 +                            node.val = val;
1714 +                            delta = 1;
1715 +                        }
1716 +                    } finally {
1717 +                        if (delta == 0)
1718 +                            setTabAt(tab, i, null);
1719 +                        if (!node.casHash(fh, h)) {
1720 +                            node.hash = h;
1721 +                            synchronized (node) { node.notifyAll(); };
1722 +                        }
1723 +                    }
1724                  }
1725 +                if (count != 0)
1726 +                    break;
1727              }
1728 <            else if (Thread.holdsLock(e))
1729 <                throw new IllegalStateException("Recursive map computation");
1730 <            else {
1731 <                boolean validated = false;
1732 <                boolean checkSize = false;
1733 <                synchronized (e) {
1734 <                    if (tabAt(tab, i) == e) {
1735 <                        validated = true;
1736 <                        for (Node first = e;;) {
1737 <                            if (e.hash == h &&
1738 <                                ((ek = e.key) == k || k.equals(ek)) &&
1739 <                                ((ev = e.val) != null)) {
1740 <                                Object fv;
1741 <                                if (replace && (fv = f.map(k)) != null)
1742 <                                    ev = e.val = fv;
1743 <                                val = (V)ev;
1728 >            else if ((fh = f.hash) == MOVED) {
1729 >                if ((fk = f.key) instanceof TreeBin) {
1730 >                    TreeBin t = (TreeBin)fk;
1731 >                    t.acquire(0);
1732 >                    try {
1733 >                        if (tabAt(tab, i) == f) {
1734 >                            count = 1;
1735 >                            TreeNode p = t.getTreeNode(h, k, t.root);
1736 >                            Object pv = (p == null) ? null : p.val;
1737 >                            if ((val = mf.apply(k, (V)pv)) != null) {
1738 >                                if (p != null)
1739 >                                    p.val = val;
1740 >                                else {
1741 >                                    count = 2;
1742 >                                    delta = 1;
1743 >                                    t.putTreeNode(h, k, val);
1744 >                                }
1745 >                            }
1746 >                            else if (p != null) {
1747 >                                delta = -1;
1748 >                                t.deleteTreeNode(p);
1749 >                            }
1750 >                        }
1751 >                    } finally {
1752 >                        t.release(0);
1753 >                    }
1754 >                    if (count != 0)
1755 >                        break;
1756 >                }
1757 >                else
1758 >                    tab = (Node[])fk;
1759 >            }
1760 >            else if ((fh & LOCKED) != 0) {
1761 >                checkForResize();
1762 >                f.tryAwaitLock(tab, i);
1763 >            }
1764 >            else if (f.casHash(fh, fh | LOCKED)) {
1765 >                try {
1766 >                    if (tabAt(tab, i) == f) {
1767 >                        count = 1;
1768 >                        for (Node e = f, pred = null;; ++count) {
1769 >                            Object ek, ev;
1770 >                            if ((e.hash & HASH_BITS) == h &&
1771 >                                (ev = e.val) != null &&
1772 >                                ((ek = e.key) == k || k.equals(ek))) {
1773 >                                val = mf.apply(k, (V)ev);
1774 >                                if (val != null)
1775 >                                    e.val = val;
1776 >                                else {
1777 >                                    delta = -1;
1778 >                                    Node en = e.next;
1779 >                                    if (pred != null)
1780 >                                        pred.next = en;
1781 >                                    else
1782 >                                        setTabAt(tab, i, en);
1783 >                                }
1784                                  break;
1785                              }
1786 <                            Node last = e;
1786 >                            pred = e;
1787                              if ((e = e.next) == null) {
1788 <                                if ((val = f.map(k)) != null) {
1789 <                                    last.next = new Node(h, k, val, null);
1790 <                                    added = true;
1791 <                                    if (last != first || tab.length <= 64)
1792 <                                        checkSize = true;
1788 >                                if (!onlyIfPresent && (val = mf.apply(k, null)) != null) {
1789 >                                    pred.next = new Node(h, k, val, null);
1790 >                                    delta = 1;
1791 >                                    if (count >= TREE_THRESHOLD)
1792 >                                        replaceWithTreeBin(tab, i, k);
1793                                  }
1794                                  break;
1795                              }
1796                          }
1797                      }
1798 +                } finally {
1799 +                    if (!f.casHash(fh | LOCKED, fh)) {
1800 +                        f.hash = fh;
1801 +                        synchronized (f) { f.notifyAll(); };
1802 +                    }
1803                  }
1804 <                if (validated) {
1805 <                    if (checkSize && tab.length < MAXIMUM_CAPACITY &&
1806 <                        resizing == 0 && counter.sum() >= (long)threshold)
713 <                        growTable();
1804 >                if (count != 0) {
1805 >                    if (tab.length <= 64)
1806 >                        count = 2;
1807                      break;
1808                  }
1809              }
1810          }
1811 <        if (added)
1812 <            counter.increment();
1811 >        if (delta != 0) {
1812 >            counter.add((long)delta);
1813 >            if (count > 1)
1814 >                checkForResize();
1815 >        }
1816          return val;
1817      }
1818  
1819 +    /** Implementation for merge */
1820 +    @SuppressWarnings("unchecked") private final Object internalMerge
1821 +        (K k, V v, BiFun<? super V, ? super V, ? extends V> mf) {
1822 +        int h = spread(k.hashCode());
1823 +        Object val = null;
1824 +        int delta = 0;
1825 +        int count = 0;
1826 +        for (Node[] tab = table;;) {
1827 +            int i; Node f; int fh; Object fk, fv;
1828 +            if (tab == null)
1829 +                tab = initTable();
1830 +            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1831 +                if (casTabAt(tab, i, null, new Node(h, k, v, null))) {
1832 +                    delta = 1;
1833 +                    val = v;
1834 +                    break;
1835 +                }
1836 +            }
1837 +            else if ((fh = f.hash) == MOVED) {
1838 +                if ((fk = f.key) instanceof TreeBin) {
1839 +                    TreeBin t = (TreeBin)fk;
1840 +                    t.acquire(0);
1841 +                    try {
1842 +                        if (tabAt(tab, i) == f) {
1843 +                            count = 1;
1844 +                            TreeNode p = t.getTreeNode(h, k, t.root);
1845 +                            val = (p == null) ? v : mf.apply((V)p.val, v);
1846 +                            if (val != null) {
1847 +                                if (p != null)
1848 +                                    p.val = val;
1849 +                                else {
1850 +                                    count = 2;
1851 +                                    delta = 1;
1852 +                                    t.putTreeNode(h, k, val);
1853 +                                }
1854 +                            }
1855 +                            else if (p != null) {
1856 +                                delta = -1;
1857 +                                t.deleteTreeNode(p);
1858 +                            }
1859 +                        }
1860 +                    } finally {
1861 +                        t.release(0);
1862 +                    }
1863 +                    if (count != 0)
1864 +                        break;
1865 +                }
1866 +                else
1867 +                    tab = (Node[])fk;
1868 +            }
1869 +            else if ((fh & LOCKED) != 0) {
1870 +                checkForResize();
1871 +                f.tryAwaitLock(tab, i);
1872 +            }
1873 +            else if (f.casHash(fh, fh | LOCKED)) {
1874 +                try {
1875 +                    if (tabAt(tab, i) == f) {
1876 +                        count = 1;
1877 +                        for (Node e = f, pred = null;; ++count) {
1878 +                            Object ek, ev;
1879 +                            if ((e.hash & HASH_BITS) == h &&
1880 +                                (ev = e.val) != null &&
1881 +                                ((ek = e.key) == k || k.equals(ek))) {
1882 +                                val = mf.apply((V)ev, v);
1883 +                                if (val != null)
1884 +                                    e.val = val;
1885 +                                else {
1886 +                                    delta = -1;
1887 +                                    Node en = e.next;
1888 +                                    if (pred != null)
1889 +                                        pred.next = en;
1890 +                                    else
1891 +                                        setTabAt(tab, i, en);
1892 +                                }
1893 +                                break;
1894 +                            }
1895 +                            pred = e;
1896 +                            if ((e = e.next) == null) {
1897 +                                val = v;
1898 +                                pred.next = new Node(h, k, val, null);
1899 +                                delta = 1;
1900 +                                if (count >= TREE_THRESHOLD)
1901 +                                    replaceWithTreeBin(tab, i, k);
1902 +                                break;
1903 +                            }
1904 +                        }
1905 +                    }
1906 +                } finally {
1907 +                    if (!f.casHash(fh | LOCKED, fh)) {
1908 +                        f.hash = fh;
1909 +                        synchronized (f) { f.notifyAll(); };
1910 +                    }
1911 +                }
1912 +                if (count != 0) {
1913 +                    if (tab.length <= 64)
1914 +                        count = 2;
1915 +                    break;
1916 +                }
1917 +            }
1918 +        }
1919 +        if (delta != 0) {
1920 +            counter.add((long)delta);
1921 +            if (count > 1)
1922 +                checkForResize();
1923 +        }
1924 +        return val;
1925 +    }
1926 +
1927 +    /** Implementation for putAll */
1928 +    private final void internalPutAll(Map<?, ?> m) {
1929 +        tryPresize(m.size());
1930 +        long delta = 0L;     // number of uncommitted additions
1931 +        boolean npe = false; // to throw exception on exit for nulls
1932 +        try {                // to clean up counts on other exceptions
1933 +            for (Map.Entry<?, ?> entry : m.entrySet()) {
1934 +                Object k, v;
1935 +                if (entry == null || (k = entry.getKey()) == null ||
1936 +                    (v = entry.getValue()) == null) {
1937 +                    npe = true;
1938 +                    break;
1939 +                }
1940 +                int h = spread(k.hashCode());
1941 +                for (Node[] tab = table;;) {
1942 +                    int i; Node f; int fh; Object fk;
1943 +                    if (tab == null)
1944 +                        tab = initTable();
1945 +                    else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1946 +                        if (casTabAt(tab, i, null, new Node(h, k, v, null))) {
1947 +                            ++delta;
1948 +                            break;
1949 +                        }
1950 +                    }
1951 +                    else if ((fh = f.hash) == MOVED) {
1952 +                        if ((fk = f.key) instanceof TreeBin) {
1953 +                            TreeBin t = (TreeBin)fk;
1954 +                            boolean validated = false;
1955 +                            t.acquire(0);
1956 +                            try {
1957 +                                if (tabAt(tab, i) == f) {
1958 +                                    validated = true;
1959 +                                    TreeNode p = t.getTreeNode(h, k, t.root);
1960 +                                    if (p != null)
1961 +                                        p.val = v;
1962 +                                    else {
1963 +                                        t.putTreeNode(h, k, v);
1964 +                                        ++delta;
1965 +                                    }
1966 +                                }
1967 +                            } finally {
1968 +                                t.release(0);
1969 +                            }
1970 +                            if (validated)
1971 +                                break;
1972 +                        }
1973 +                        else
1974 +                            tab = (Node[])fk;
1975 +                    }
1976 +                    else if ((fh & LOCKED) != 0) {
1977 +                        counter.add(delta);
1978 +                        delta = 0L;
1979 +                        checkForResize();
1980 +                        f.tryAwaitLock(tab, i);
1981 +                    }
1982 +                    else if (f.casHash(fh, fh | LOCKED)) {
1983 +                        int count = 0;
1984 +                        try {
1985 +                            if (tabAt(tab, i) == f) {
1986 +                                count = 1;
1987 +                                for (Node e = f;; ++count) {
1988 +                                    Object ek, ev;
1989 +                                    if ((e.hash & HASH_BITS) == h &&
1990 +                                        (ev = e.val) != null &&
1991 +                                        ((ek = e.key) == k || k.equals(ek))) {
1992 +                                        e.val = v;
1993 +                                        break;
1994 +                                    }
1995 +                                    Node last = e;
1996 +                                    if ((e = e.next) == null) {
1997 +                                        ++delta;
1998 +                                        last.next = new Node(h, k, v, null);
1999 +                                        if (count >= TREE_THRESHOLD)
2000 +                                            replaceWithTreeBin(tab, i, k);
2001 +                                        break;
2002 +                                    }
2003 +                                }
2004 +                            }
2005 +                        } finally {
2006 +                            if (!f.casHash(fh | LOCKED, fh)) {
2007 +                                f.hash = fh;
2008 +                                synchronized (f) { f.notifyAll(); };
2009 +                            }
2010 +                        }
2011 +                        if (count != 0) {
2012 +                            if (count > 1) {
2013 +                                counter.add(delta);
2014 +                                delta = 0L;
2015 +                                checkForResize();
2016 +                            }
2017 +                            break;
2018 +                        }
2019 +                    }
2020 +                }
2021 +            }
2022 +        } finally {
2023 +            if (delta != 0)
2024 +                counter.add(delta);
2025 +        }
2026 +        if (npe)
2027 +            throw new NullPointerException();
2028 +    }
2029 +
2030 +    /* ---------------- Table Initialization and Resizing -------------- */
2031 +
2032      /**
2033 <     * Implementation for clear. Steps through each bin, removing all nodes.
2033 >     * Returns a power of two table size for the given desired capacity.
2034 >     * See Hacker's Delight, sec 3.2
2035 >     */
2036 >    private static final int tableSizeFor(int c) {
2037 >        int n = c - 1;
2038 >        n |= n >>> 1;
2039 >        n |= n >>> 2;
2040 >        n |= n >>> 4;
2041 >        n |= n >>> 8;
2042 >        n |= n >>> 16;
2043 >        return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
2044 >    }
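A minimal standalone sketch (not part of the patch) of the same power-of-two rounding, so the bit-smearing step above can be checked by hand; MAXIMUM_CAPACITY is assumed to be 1 << 30, the usual bound for this class.

public class TableSizeForDemo {
    static final int MAXIMUM_CAPACITY = 1 << 30;   // assumed bound

    static int tableSizeFor(int c) {
        int n = c - 1;
        n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16;
        return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
    }

    public static void main(String[] args) {
        System.out.println(tableSizeFor(1));    // 1
        System.out.println(tableSizeFor(16));   // 16 (already a power of two)
        System.out.println(tableSizeFor(17));   // 32
        System.out.println(tableSizeFor(100));  // 128
    }
}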
2045 >
2046 >    /**
2047 >     * Initializes table, using the size recorded in sizeCtl.
2048 >     */
2049 >    private final Node[] initTable() {
2050 >        Node[] tab; int sc;
2051 >        while ((tab = table) == null) {
2052 >            if ((sc = sizeCtl) < 0)
2053 >                Thread.yield(); // lost initialization race; just spin
2054 >            else if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
2055 >                try {
2056 >                    if ((tab = table) == null) {
2057 >                        int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
2058 >                        tab = table = new Node[n];
2059 >                        sc = n - (n >>> 2);
2060 >                    }
2061 >                } finally {
2062 >                    sizeCtl = sc;
2063 >                }
2064 >                break;
2065 >            }
2066 >        }
2067 >        return tab;
2068 >    }
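A hedged sketch of the sizeCtl protocol used by initTable above: a negative value marks an initialization in progress, the CAS winner allocates the array, and losers yield and retry. AtomicInteger stands in for the Unsafe-based field CAS of the real class; all names here are illustrative.

import java.util.concurrent.atomic.AtomicInteger;

class LazyTableSketch {
    volatile Object[] table;
    final AtomicInteger sizeCtl = new AtomicInteger(16);  // > 0: target capacity

    Object[] initTable() {
        Object[] tab;
        while ((tab = table) == null) {
            int sc = sizeCtl.get();
            if (sc < 0)
                Thread.yield();                       // another thread is initializing
            else if (sizeCtl.compareAndSet(sc, -1)) {
                try {
                    if ((tab = table) == null) {
                        int n = (sc > 0) ? sc : 16;
                        tab = table = new Object[n];
                        sc = n - (n >>> 2);           // next resize threshold (~0.75 * n)
                    }
                } finally {
                    sizeCtl.set(sc);
                }
                break;
            }
        }
        return tab;
    }
}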
2069 >
2070 >    /**
2071 >     * If table is too small and not already resizing, creates next
2072 >     * table and transfers bins.  Rechecks occupancy after a transfer
2073 >     * to see if another resize is already needed because resizings
2074 >     * are lagging additions.
2075 >     */
2076 >    private final void checkForResize() {
2077 >        Node[] tab; int n, sc;
2078 >        while ((tab = table) != null &&
2079 >               (n = tab.length) < MAXIMUM_CAPACITY &&
2080 >               (sc = sizeCtl) >= 0 && counter.sum() >= (long)sc &&
2081 >               UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
2082 >            try {
2083 >                if (tab == table) {
2084 >                    table = rebuild(tab);
2085 >                    sc = (n << 1) - (n >>> 1);
2086 >                }
2087 >            } finally {
2088 >                sizeCtl = sc;
2089 >            }
2090 >        }
2091 >    }
2092 >
2093 >    /**
2094 >     * Tries to presize table to accommodate the given number of elements.
2095 >     *
2096 >     * @param size number of elements (doesn't need to be perfectly accurate)
2097 >     */
2098 >    private final void tryPresize(int size) {
2099 >        int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
2100 >            tableSizeFor(size + (size >>> 1) + 1);
2101 >        int sc;
2102 >        while ((sc = sizeCtl) >= 0) {
2103 >            Node[] tab = table; int n;
2104 >            if (tab == null || (n = tab.length) == 0) {
2105 >                n = (sc > c) ? sc : c;
2106 >                if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
2107 >                    try {
2108 >                        if (table == tab) {
2109 >                            table = new Node[n];
2110 >                            sc = n - (n >>> 2);
2111 >                        }
2112 >                    } finally {
2113 >                        sizeCtl = sc;
2114 >                    }
2115 >                }
2116 >            }
2117 >            else if (c <= sc || n >= MAXIMUM_CAPACITY)
2118 >                break;
2119 >            else if (UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
2120 >                try {
2121 >                    if (table == tab) {
2122 >                        table = rebuild(tab);
2123 >                        sc = (n << 1) - (n >>> 1);
2124 >                    }
2125 >                } finally {
2126 >                    sizeCtl = sc;
2127 >                }
2128 >            }
2129 >        }
2130 >    }
2131 >
2132 >    /**
2133 >     * Moves and/or copies the nodes in each bin to new table. See
2134 >     * above for explanation.
2135 >     *
2136 >     * @return the new table
2137 >     */
2138 >    private static final Node[] rebuild(Node[] tab) {
2139 >        int n = tab.length;
2140 >        Node[] nextTab = new Node[n << 1];
2141 >        Node fwd = new Node(MOVED, nextTab, null, null);
2142 >        int[] buffer = null;       // holds bins to revisit; null until needed
2143 >        Node rev = null;           // reverse forwarder; null until needed
2144 >        int nbuffered = 0;         // the number of bins in buffer list
2145 >        int bufferIndex = 0;       // buffer index of current buffered bin
2146 >        int bin = n - 1;           // current non-buffered bin or -1 if none
2147 >
2148 >        for (int i = bin;;) {      // start upwards sweep
2149 >            int fh; Node f;
2150 >            if ((f = tabAt(tab, i)) == null) {
2151 >                if (bin >= 0) {    // Unbuffered; no lock needed (or available)
2152 >                    if (!casTabAt(tab, i, f, fwd))
2153 >                        continue;
2154 >                }
2155 >                else {             // transiently use a locked forwarding node
2156 >                    Node g = new Node(MOVED|LOCKED, nextTab, null, null);
2157 >                    if (!casTabAt(tab, i, f, g))
2158 >                        continue;
2159 >                    setTabAt(nextTab, i, null);
2160 >                    setTabAt(nextTab, i + n, null);
2161 >                    setTabAt(tab, i, fwd);
2162 >                    if (!g.casHash(MOVED|LOCKED, MOVED)) {
2163 >                        g.hash = MOVED;
2164 >                        synchronized (g) { g.notifyAll(); }
2165 >                    }
2166 >                }
2167 >            }
2168 >            else if ((fh = f.hash) == MOVED) {
2169 >                Object fk = f.key;
2170 >                if (fk instanceof TreeBin) {
2171 >                    TreeBin t = (TreeBin)fk;
2172 >                    boolean validated = false;
2173 >                    t.acquire(0);
2174 >                    try {
2175 >                        if (tabAt(tab, i) == f) {
2176 >                            validated = true;
2177 >                            splitTreeBin(nextTab, i, t);
2178 >                            setTabAt(tab, i, fwd);
2179 >                        }
2180 >                    } finally {
2181 >                        t.release(0);
2182 >                    }
2183 >                    if (!validated)
2184 >                        continue;
2185 >                }
2186 >            }
2187 >            else if ((fh & LOCKED) == 0 && f.casHash(fh, fh|LOCKED)) {
2188 >                boolean validated = false;
2189 >                try {              // split to lo and hi lists; copying as needed
2190 >                    if (tabAt(tab, i) == f) {
2191 >                        validated = true;
2192 >                        splitBin(nextTab, i, f);
2193 >                        setTabAt(tab, i, fwd);
2194 >                    }
2195 >                } finally {
2196 >                    if (!f.casHash(fh | LOCKED, fh)) {
2197 >                        f.hash = fh;
2198 >                        synchronized (f) { f.notifyAll(); };
2199 >                    }
2200 >                }
2201 >                if (!validated)
2202 >                    continue;
2203 >            }
2204 >            else {
2205 >                if (buffer == null) // initialize buffer for revisits
2206 >                    buffer = new int[TRANSFER_BUFFER_SIZE];
2207 >                if (bin < 0 && bufferIndex > 0) {
2208 >                    int j = buffer[--bufferIndex];
2209 >                    buffer[bufferIndex] = i;
2210 >                    i = j;         // swap with another bin
2211 >                    continue;
2212 >                }
2213 >                if (bin < 0 || nbuffered >= TRANSFER_BUFFER_SIZE) {
2214 >                    f.tryAwaitLock(tab, i);
2215 >                    continue;      // no other options -- block
2216 >                }
2217 >                if (rev == null)   // initialize reverse-forwarder
2218 >                    rev = new Node(MOVED, tab, null, null);
2219 >                if (tabAt(tab, i) != f || (f.hash & LOCKED) == 0)
2220 >                    continue;      // recheck before adding to list
2221 >                buffer[nbuffered++] = i;
2222 >                setTabAt(nextTab, i, rev);     // install place-holders
2223 >                setTabAt(nextTab, i + n, rev);
2224 >            }
2225 >
2226 >            if (bin > 0)
2227 >                i = --bin;
2228 >            else if (buffer != null && nbuffered > 0) {
2229 >                bin = -1;
2230 >                i = buffer[bufferIndex = --nbuffered];
2231 >            }
2232 >            else
2233 >                return nextTab;
2234 >        }
2235 >    }
2236 >
2237 >    /**
2238 >     * Splits a normal bin with list headed by e into lo and hi parts;
2239 >     * installs in given table.
2240 >     */
2241 >    private static void splitBin(Node[] nextTab, int i, Node e) {
2242 >        int bit = nextTab.length >>> 1; // bit to split on
2243 >        int runBit = e.hash & bit;
2244 >        Node lastRun = e, lo = null, hi = null;
2245 >        for (Node p = e.next; p != null; p = p.next) {
2246 >            int b = p.hash & bit;
2247 >            if (b != runBit) {
2248 >                runBit = b;
2249 >                lastRun = p;
2250 >            }
2251 >        }
2252 >        if (runBit == 0)
2253 >            lo = lastRun;
2254 >        else
2255 >            hi = lastRun;
2256 >        for (Node p = e; p != lastRun; p = p.next) {
2257 >            int ph = p.hash & HASH_BITS;
2258 >            Object pk = p.key, pv = p.val;
2259 >            if ((ph & bit) == 0)
2260 >                lo = new Node(ph, pk, pv, lo);
2261 >            else
2262 >                hi = new Node(ph, pk, pv, hi);
2263 >        }
2264 >        setTabAt(nextTab, i, lo);
2265 >        setTabAt(nextTab, i + bit, hi);
2266 >    }
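The split above relies on table lengths being powers of two: when a table of length n doubles, the entry in bin i either stays at i or moves to i + n, decided by the single hash bit with value n. A small illustrative sketch (not from the source):

public class SplitBinDemo {
    public static void main(String[] args) {
        int n = 16;                       // old table length (power of two)
        int bit = n;                      // the bit splitBin examines
        int[] hashes = {5, 21, 37, 53};
        for (int h : hashes) {
            int oldIndex = h & (n - 1);
            int newIndex = ((h & bit) == 0) ? oldIndex : oldIndex + n;
            System.out.println("hash " + h + ": bin " + oldIndex + " -> " + newIndex);
        }
    }
}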
2267 >
2268 >    /**
2269 >     * Splits a tree bin into lo and hi parts; installs in given table.
2270 >     */
2271 >    private static void splitTreeBin(Node[] nextTab, int i, TreeBin t) {
2272 >        int bit = nextTab.length >>> 1;
2273 >        TreeBin lt = new TreeBin();
2274 >        TreeBin ht = new TreeBin();
2275 >        int lc = 0, hc = 0;
2276 >        for (Node e = t.first; e != null; e = e.next) {
2277 >            int h = e.hash & HASH_BITS;
2278 >            Object k = e.key, v = e.val;
2279 >            if ((h & bit) == 0) {
2280 >                ++lc;
2281 >                lt.putTreeNode(h, k, v);
2282 >            }
2283 >            else {
2284 >                ++hc;
2285 >                ht.putTreeNode(h, k, v);
2286 >            }
2287 >        }
2288 >        Node ln, hn; // throw away trees if too small
2289 >        if (lc <= (TREE_THRESHOLD >>> 1)) {
2290 >            ln = null;
2291 >            for (Node p = lt.first; p != null; p = p.next)
2292 >                ln = new Node(p.hash, p.key, p.val, ln);
2293 >        }
2294 >        else
2295 >            ln = new Node(MOVED, lt, null, null);
2296 >        setTabAt(nextTab, i, ln);
2297 >        if (hc <= (TREE_THRESHOLD >>> 1)) {
2298 >            hn = null;
2299 >            for (Node p = ht.first; p != null; p = p.next)
2300 >                hn = new Node(p.hash, p.key, p.val, hn);
2301 >        }
2302 >        else
2303 >            hn = new Node(MOVED, ht, null, null);
2304 >        setTabAt(nextTab, i + bit, hn);
2305 >    }
2306 >
2307 >    /**
2308 >     * Implementation for clear. Steps through each bin, removing all
2309 >     * nodes.
2310       */
2311      private final void internalClear() {
2312          long delta = 0L; // negative number of deletions
2313          int i = 0;
2314          Node[] tab = table;
2315          while (tab != null && i < tab.length) {
2316 <            Node e = tabAt(tab, i);
2317 <            if (e == null)
2316 >            int fh; Object fk;
2317 >            Node f = tabAt(tab, i);
2318 >            if (f == null)
2319                  ++i;
2320 <            else if (e.hash < 0)
2321 <                tab = (Node[])e.key;
2322 <            else {
2323 <                boolean validated = false;
2324 <                synchronized (e) {
2325 <                    if (tabAt(tab, i) == e) {
2326 <                        validated = true;
2327 <                        Node en;
2328 <                        do {
2329 <                            en = e.next;
2330 <                            if (e.val != null) { // currently always true
2320 >            else if ((fh = f.hash) == MOVED) {
2321 >                if ((fk = f.key) instanceof TreeBin) {
2322 >                    TreeBin t = (TreeBin)fk;
2323 >                    t.acquire(0);
2324 >                    try {
2325 >                        if (tabAt(tab, i) == f) {
2326 >                            for (Node p = t.first; p != null; p = p.next) {
2327 >                                if (p.val != null) { // (currently always true)
2328 >                                    p.val = null;
2329 >                                    --delta;
2330 >                                }
2331 >                            }
2332 >                            t.first = null;
2333 >                            t.root = null;
2334 >                            ++i;
2335 >                        }
2336 >                    } finally {
2337 >                        t.release(0);
2338 >                    }
2339 >                }
2340 >                else
2341 >                    tab = (Node[])fk;
2342 >            }
2343 >            else if ((fh & LOCKED) != 0) {
2344 >                counter.add(delta); // opportunistically update count
2345 >                delta = 0L;
2346 >                f.tryAwaitLock(tab, i);
2347 >            }
2348 >            else if (f.casHash(fh, fh | LOCKED)) {
2349 >                try {
2350 >                    if (tabAt(tab, i) == f) {
2351 >                        for (Node e = f; e != null; e = e.next) {
2352 >                            if (e.val != null) {  // (currently always true)
2353                                  e.val = null;
2354                                  --delta;
2355                              }
2356 <                        } while ((e = en) != null);
2356 >                        }
2357                          setTabAt(tab, i, null);
2358 +                        ++i;
2359 +                    }
2360 +                } finally {
2361 +                    if (!f.casHash(fh | LOCKED, fh)) {
2362 +                        f.hash = fh;
2363 +                        synchronized (f) { f.notifyAll(); };
2364                      }
2365                  }
752                if (validated)
753                    ++i;
2366              }
2367          }
2368 <        counter.add(delta);
2368 >        if (delta != 0)
2369 >            counter.add(delta);
2370      }
2371  
2372      /* ---------------- Table Traversal -------------- */
2373  
2374      /**
2375       * Encapsulates traversal for methods such as containsValue; also
2376 <     * serves as a base class for other iterators.
2376 >     * serves as a base class for other iterators and bulk tasks.
2377       *
2378       * At each step, the iterator snapshots the key ("nextKey") and
2379       * value ("nextVal") of a valid node (i.e., one that, at point of
2380 <     * snapshot, has a nonnull user value). Because val fields can
2380 >     * snapshot, has a non-null user value). Because val fields can
2381       * change (including to null, indicating deletion), field nextVal
2382       * might not be accurate at point of use, but still maintains the
2383       * weak consistency property of holding a value that was once
2384 <     * valid.
2384 >     * valid. To support iterator.remove, the nextKey field is not
2385 >     * updated (nulled out) when the iterator cannot advance.
2386       *
2387       * Internal traversals directly access these fields, as in:
2388 <     * {@code while (it.next != null) { process(nextKey); it.advance(); }}
2388 >     * {@code while (it.advance() != null) { process(it.nextKey); }}
2389       *
2390 <     * Exported iterators (subclasses of ViewIterator) extract key,
2391 <     * value, or key-value pairs as return values of Iterator.next(),
2392 <     * and encapsulate the it.next check as hasNext();
2393 <     *
2394 <     * The iterator visits each valid node that was reachable upon
2395 <     * iterator construction once. It might miss some that were added
2396 <     * to a bin after the bin was visited, which is OK wrt consistency
2397 <     * guarantees. Maintaining this property in the face of possible
2398 <     * ongoing resizes requires a fair amount of bookkeeping state
2399 <     * that is difficult to optimize away amidst volatile accesses.
2400 <     * Even so, traversal maintains reasonable throughput.
2390 >     * Exported iterators must track whether the iterator has advanced
2391 >     * (in hasNext vs next) (by setting/checking/nulling field
2392 >     * nextVal), and then extract key, value, or key-value pairs as
2393 >     * return values of next().
2394 >     *
2395 >     * The iterator visits once each still-valid node that was
2396 >     * reachable upon iterator construction. It might miss some that
2397 >     * were added to a bin after the bin was visited, which is OK wrt
2398 >     * consistency guarantees. Maintaining this property in the face
2399 >     * of possible ongoing resizes requires a fair amount of
2400 >     * bookkeeping state that is difficult to optimize away amidst
2401 >     * volatile accesses.  Even so, traversal maintains reasonable
2402 >     * throughput.
2403       *
2404       * Normally, iteration proceeds bin-by-bin traversing lists.
2405       * However, if the table has been resized, then all future steps
# Line 793 | Line 2409 | public class ConcurrentHashMapV8<K, V>
2409       * across threads, iteration terminates if a bounds checks fails
2410       * for a table read.
2411       *
2412 <     * The range-based constructor enables creation of parallel
2413 <     * range-splitting traversals. (Not yet implemented.)
2412 >     * This class extends CountedCompleter to streamline parallel
2413 >     * iteration in bulk operations. This adds only a few fields of
2414 >     * space overhead, which is small enough in cases where it is not
2415 >     * needed to not worry about it.  Because CountedCompleter is
2416 >     * Serializable, but iterators need not be, we need to add warning
2417 >     * suppressions.
2418       */
2419 <    static class InternalIterator {
2419 >    @SuppressWarnings("serial") static class Traverser<K,V,R> extends CountedCompleter<R> {
2420 >        final ConcurrentHashMapV8<K, V> map;
2421          Node next;           // the next entry to use
801        Node last;           // the last entry used
2422          Object nextKey;      // cached key field of next
2423          Object nextVal;      // cached val field of next
2424          Node[] tab;          // current table; updated if resized
2425          int index;           // index of bin to use next
2426          int baseIndex;       // current index of initial table
2427 <        final int baseLimit; // index bound for initial table
2428 <        final int baseSize;  // initial table size
2427 >        int baseLimit;       // index bound for initial table
2428 >        int baseSize;        // initial table size
2429 >        int batch;           // split control
2430  
2431          /** Creates iterator for all entries in the table. */
2432 <        InternalIterator(Node[] tab) {
2433 <            this.tab = tab;
2434 <            baseLimit = baseSize = (tab == null) ? 0 : tab.length;
2435 <            index = baseIndex = 0;
2436 <            next = null;
2437 <            advance();
2438 <        }
2439 <
2440 <        /** Creates iterator for the given range of the table */
2441 <        InternalIterator(Node[] tab, int lo, int hi) {
2442 <            this.tab = tab;
2443 <            baseSize = (tab == null) ? 0 : tab.length;
2444 <            baseLimit = (hi <= baseSize) ? hi : baseSize;
2445 <            index = baseIndex = lo;
2446 <            next = null;
2447 <            advance();
2448 <        }
2449 <
2450 <        /** Advances next. See above for explanation. */
2451 <        final void advance() {
2452 <            Node e = last = next;
2432 >        Traverser(ConcurrentHashMapV8<K, V> map) {
2433 >            this.map = map;
2434 >        }
2435 >
2436 >        /** Creates iterator for split() methods and task constructors */
2437 >        Traverser(ConcurrentHashMapV8<K,V> map, Traverser<K,V,?> it, int batch) {
2438 >            super(it);
2439 >            this.batch = batch;
2440 >            if ((this.map = map) != null && it != null) { // split parent
2441 >                Node[] t;
2442 >                if ((t = it.tab) == null &&
2443 >                    (t = it.tab = map.table) != null)
2444 >                    it.baseLimit = it.baseSize = t.length;
2445 >                this.tab = t;
2446 >                this.baseSize = it.baseSize;
2447 >                int hi = this.baseLimit = it.baseLimit;
2448 >                it.baseLimit = this.index = this.baseIndex =
2449 >                    (hi + it.baseIndex + 1) >>> 1;
2450 >            }
2451 >        }
2452 >
2453 >        /**
2454 >         * Advances next; returns nextVal or null if terminated.
2455 >         * See above for explanation.
2456 >         */
2457 >        final Object advance() {
2458 >            Node e = next;
2459 >            Object ev = null;
2460              outer: do {
2461 <                if (e != null)                   // pass used or skipped node
2461 >                if (e != null)                  // advance past used/skipped node
2462                      e = e.next;
2463 <                while (e == null) {              // get to next non-null bin
2464 <                    Node[] t; int b, i, n;       // checks must use locals
2465 <                    if ((b = baseIndex) >= baseLimit || (i = index) < 0 ||
2466 <                        (t = tab) == null || i >= (n = t.length))
2463 >                while (e == null) {             // get to next non-null bin
2464 >                    ConcurrentHashMapV8<K, V> m;
2465 >                    Node[] t; int b, i, n; Object ek; // checks must use locals
2466 >                    if ((t = tab) != null)
2467 >                        n = t.length;
2468 >                    else if ((m = map) != null && (t = tab = m.table) != null)
2469 >                        n = baseLimit = baseSize = t.length;
2470 >                    else
2471 >                        break outer;
2472 >                    if ((b = baseIndex) >= baseLimit ||
2473 >                        (i = index) < 0 || i >= n)
2474                          break outer;
2475 <                    else if ((e = tabAt(t, i)) != null && e.hash < 0)
2476 <                        tab = (Node[])e.key;     // restarts due to null val
2477 <                    else                         // visit upper slots if present
2478 <                        index = (i += baseSize) < n ? i : (baseIndex = b + 1);
2475 >                    if ((e = tabAt(t, i)) != null && e.hash == MOVED) {
2476 >                        if ((ek = e.key) instanceof TreeBin)
2477 >                            e = ((TreeBin)ek).first;
2478 >                        else {
2479 >                            tab = (Node[])ek;
2480 >                            continue;           // restarts due to null val
2481 >                        }
2482 >                    }                           // visit upper slots if present
2483 >                    index = (i += baseSize) < n ? i : (baseIndex = b + 1);
2484                  }
2485                  nextKey = e.key;
2486 <            } while ((nextVal = e.val) == null); // skip deleted or special nodes
2486 >            } while ((ev = e.val) == null);    // skip deleted or special nodes
2487              next = e;
2488 +            return nextVal = ev;
2489 +        }
2490 +
2491 +        public final void remove() {
2492 +            Object k = nextKey;
2493 +            if (k == null && (advance() == null || (k = nextKey) == null))
2494 +                throw new IllegalStateException();
2495 +            map.internalReplace(k, null, null);
2496          }
2497 +
2498 +        public final boolean hasNext() {
2499 +            return nextVal != null || advance() != null;
2500 +        }
2501 +
2502 +        public final boolean hasMoreElements() { return hasNext(); }
2503 +
2504 +        public void compute() { } // default no-op CountedCompleter body
2505 +
2506 +        /**
2507 +         * Returns a batch value > 0 if this task should (and must) be
2508 +         * split, if so, adding to pending count, and in any case
2509 +         * updating batch value. The initial batch value is approx
2510 +         * exp2 of the number of times (minus one) to split task by
2511 +         * two before executing leaf action. This value is faster to
2512 +         * compute and more convenient to use as a guide to splitting
2513 +         * than is the depth, since it is used while dividing by two
2514 +         * anyway.
2515 +         */
2516 +        final int preSplit() {
2517 +            ConcurrentHashMapV8<K, V> m; int b; Node[] t;  ForkJoinPool pool;
2518 +            if ((b = batch) < 0 && (m = map) != null) { // force initialization
2519 +                if ((t = tab) == null && (t = tab = m.table) != null)
2520 +                    baseLimit = baseSize = t.length;
2521 +                if (t != null) {
2522 +                    long n = m.counter.sum();
2523 +                    int par = ((pool = getPool()) == null) ?
2524 +                        ForkJoinPool.getCommonPoolParallelism() :
2525 +                        pool.getParallelism();
2526 +                    int sp = par << 3; // slack of 8
2527 +                    b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2528 +                }
2529 +            }
2530 +            b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
2531 +            if ((batch = b) > 0)
2532 +                addToPendingCount(1);
2533 +            return b;
2534 +        }
2535 +
2536      }
2537  
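A hedged usage sketch of the weak-consistency property the Traverser gives the exported views: iteration tolerates concurrent modification and never throws ConcurrentModificationException. The class and key names below are illustrative only.

import jsr166e.ConcurrentHashMapV8;

public class WeaklyConsistentDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String, Integer> map =
            new ConcurrentHashMapV8<String, Integer>();
        for (int i = 0; i < 100; i++)
            map.put("k" + i, i);
        for (String k : map.keySet()) {   // weakly consistent iteration
            Integer v = map.get(k);
            if (v != null && v % 2 == 0)
                map.remove(k);            // no ConcurrentModificationException
        }
        System.out.println("remaining: " + map.mappingCount());
    }
}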
2538      /* ---------------- Public operations -------------- */
2539  
2540      /**
2541 <     * Creates a new, empty map with the default initial table size (16),
2541 >     * Creates a new, empty map with the default initial table size (16).
2542       */
2543      public ConcurrentHashMapV8() {
2544          this.counter = new LongAdder();
858        this.targetCapacity = DEFAULT_CAPACITY;
2545      }
2546  
2547      /**
# Line 866 | Line 2552 | public class ConcurrentHashMapV8<K, V>
2552       * @param initialCapacity The implementation performs internal
2553       * sizing to accommodate this many elements.
2554       * @throws IllegalArgumentException if the initial capacity of
2555 <     * elements is negative.
2555 >     * elements is negative
2556       */
2557      public ConcurrentHashMapV8(int initialCapacity) {
2558          if (initialCapacity < 0)
# Line 875 | Line 2561 | public class ConcurrentHashMapV8<K, V>
2561                     MAXIMUM_CAPACITY :
2562                     tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2563          this.counter = new LongAdder();
2564 <        this.targetCapacity = cap;
2564 >        this.sizeCtl = cap;
2565      }
2566  
2567      /**
# Line 885 | Line 2571 | public class ConcurrentHashMapV8<K, V>
2571       */
2572      public ConcurrentHashMapV8(Map<? extends K, ? extends V> m) {
2573          this.counter = new LongAdder();
2574 <        this.targetCapacity = DEFAULT_CAPACITY;
2575 <        putAll(m);
2574 >        this.sizeCtl = DEFAULT_CAPACITY;
2575 >        internalPutAll(m);
2576      }
2577  
2578      /**
# Line 898 | Line 2584 | public class ConcurrentHashMapV8<K, V>
2584       * performs internal sizing to accommodate this many elements,
2585       * given the specified load factor.
2586       * @param loadFactor the load factor (table density) for
2587 <     * establishing the initial table size.
2587 >     * establishing the initial table size
2588       * @throws IllegalArgumentException if the initial capacity of
2589       * elements is negative or the load factor is nonpositive
2590       *
# Line 918 | Line 2604 | public class ConcurrentHashMapV8<K, V>
2604       * performs internal sizing to accommodate this many elements,
2605       * given the specified load factor.
2606       * @param loadFactor the load factor (table density) for
2607 <     * establishing the initial table size.
2607 >     * establishing the initial table size
2608       * @param concurrencyLevel the estimated number of concurrently
2609       * updating threads. The implementation may use this value as
2610       * a sizing hint.
2611       * @throws IllegalArgumentException if the initial capacity is
2612       * negative or the load factor or concurrencyLevel are
2613 <     * nonpositive.
2613 >     * nonpositive
2614       */
2615      public ConcurrentHashMapV8(int initialCapacity,
2616                                 float loadFactor, int concurrencyLevel) {
# Line 933 | Line 2619 | public class ConcurrentHashMapV8<K, V>
2619          if (initialCapacity < concurrencyLevel)   // Use at least as many bins
2620              initialCapacity = concurrencyLevel;   // as estimated threads
2621          long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2622 <        int cap =  ((size >= (long)MAXIMUM_CAPACITY) ?
2623 <                    MAXIMUM_CAPACITY: tableSizeFor((int)size));
2622 >        int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2623 >            MAXIMUM_CAPACITY : tableSizeFor((int)size);
2624          this.counter = new LongAdder();
2625 <        this.targetCapacity = cap;
2625 >        this.sizeCtl = cap;
2626 >    }
2627 >
2628 >    /**
2629 >     * Creates a new {@link Set} backed by a ConcurrentHashMapV8
2630 >     * from the given type to {@code Boolean.TRUE}.
2631 >     *
2632 >     * @return the new set
2633 >     */
2634 >    public static <K> KeySetView<K,Boolean> newKeySet() {
2635 >        return new KeySetView<K,Boolean>(new ConcurrentHashMapV8<K,Boolean>(),
2636 >                                      Boolean.TRUE);
2637 >    }
2638 >
2639 >    /**
2640 >     * Creates a new {@link Set} backed by a ConcurrentHashMapV8
2641 >     * from the given type to {@code Boolean.TRUE}.
2642 >     *
2643 >     * @param initialCapacity The implementation performs internal
2644 >     * sizing to accommodate this many elements.
2645 >     * @throws IllegalArgumentException if the initial capacity of
2646 >     * elements is negative
2647 >     * @return the new set
2648 >     */
2649 >    public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2650 >        return new KeySetView<K,Boolean>(new ConcurrentHashMapV8<K,Boolean>(initialCapacity),
2651 >                                      Boolean.TRUE);
2652      }
2653  
2654      /**
# Line 957 | Line 2669 | public class ConcurrentHashMapV8<K, V>
2669      }
2670  
2671      /**
2672 +     * Returns the number of mappings. This method should be used
2673 +     * instead of {@link #size} because a ConcurrentHashMapV8 may
2674 +     * contain more mappings than can be represented as an int. The
2675 +     * value returned is an estimate; the actual count may differ if
2676 +     * there are concurrent insertions or removals.
2677 +     *
2678 +     * @return the number of mappings
2679 +     */
2680 +    public long mappingCount() {
2681 +        long n = counter.sum();
2682 +        return (n < 0L) ? 0L : n; // ignore transient negative values
2683 +    }
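A brief hedged sketch contrasting mappingCount() with size(): both are estimates under concurrent updates, but mappingCount() is not truncated to the int range.

import jsr166e.ConcurrentHashMapV8;

public class MappingCountDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<Integer, String> m =
            new ConcurrentHashMapV8<Integer, String>();
        for (int i = 0; i < 1000; i++)
            m.put(i, "v" + i);
        long n = m.mappingCount();   // preferred: a long, never negative
        int s = m.size();            // same estimate, limited to int
        System.out.println(n + " " + s);
    }
}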
2684 +
2685 +    /**
2686       * Returns the value to which the specified key is mapped,
2687       * or {@code null} if this map contains no mapping for the key.
2688       *
# Line 967 | Line 2693 | public class ConcurrentHashMapV8<K, V>
2693       *
2694       * @throws NullPointerException if the specified key is null
2695       */
2696 <    @SuppressWarnings("unchecked")
971 <    public V get(Object key) {
2696 >    @SuppressWarnings("unchecked") public V get(Object key) {
2697          if (key == null)
2698              throw new NullPointerException();
2699          return (V)internalGet(key);
2700      }
2701  
2702      /**
2703 +     * Returns the value to which the specified key is mapped,
2704 +     * or the given defaultValue if this map contains no mapping for the key.
2705 +     *
2706 +     * @param key the key
2707 +     * @param defaultValue the value to return if this map contains
2708 +     * no mapping for the given key
2709 +     * @return the mapping for the key, if present; else the defaultValue
2710 +     * @throws NullPointerException if the specified key is null
2711 +     */
2712 +    @SuppressWarnings("unchecked") public V getValueOrDefault(Object key, V defaultValue) {
2713 +        if (key == null)
2714 +            throw new NullPointerException();
2715 +        V v = (V) internalGet(key);
2716 +        return v == null ? defaultValue : v;
2717 +    }
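A hedged usage sketch of getValueOrDefault (this backport's spelling of what later became Map.getOrDefault):

import jsr166e.ConcurrentHashMapV8;

public class GetValueOrDefaultDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String, Integer> hits =
            new ConcurrentHashMapV8<String, Integer>();
        hits.put("a", 3);
        System.out.println(hits.getValueOrDefault("a", 0));  // 3
        System.out.println(hits.getValueOrDefault("b", 0));  // 0 (no mapping)
    }
}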
2718 +
2719 +    /**
2720       * Tests if the specified object is a key in this table.
2721       *
2722       * @param  key   possible key
2723       * @return {@code true} if and only if the specified object
2724       *         is a key in this table, as determined by the
2725 <     *         {@code equals} method; {@code false} otherwise.
2725 >     *         {@code equals} method; {@code false} otherwise
2726       * @throws NullPointerException if the specified key is null
2727       */
2728      public boolean containsKey(Object key) {
# Line 1003 | Line 2745 | public class ConcurrentHashMapV8<K, V>
2745          if (value == null)
2746              throw new NullPointerException();
2747          Object v;
2748 <        InternalIterator it = new InternalIterator(table);
2749 <        while (it.next != null) {
2750 <            if ((v = it.nextVal) == value || value.equals(v))
2748 >        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2749 >        while ((v = it.advance()) != null) {
2750 >            if (v == value || value.equals(v))
2751                  return true;
1010            it.advance();
2752          }
2753          return false;
2754      }
# Line 1035 | Line 2776 | public class ConcurrentHashMapV8<K, V>
2776       * Maps the specified key to the specified value in this table.
2777       * Neither the key nor the value can be null.
2778       *
2779 <     * <p> The value can be retrieved by calling the {@code get} method
2779 >     * <p>The value can be retrieved by calling the {@code get} method
2780       * with a key that is equal to the original key.
2781       *
2782       * @param key key with which the specified value is to be associated
# Line 1044 | Line 2785 | public class ConcurrentHashMapV8<K, V>
2785       *         {@code null} if there was no mapping for {@code key}
2786       * @throws NullPointerException if the specified key or value is null
2787       */
2788 <    @SuppressWarnings("unchecked")
1048 <    public V put(K key, V value) {
2788 >    @SuppressWarnings("unchecked") public V put(K key, V value) {
2789          if (key == null || value == null)
2790              throw new NullPointerException();
2791 <        return (V)internalPut(key, value, true);
2791 >        return (V)internalPut(key, value);
2792      }
2793  
2794      /**
# Line 1058 | Line 2798 | public class ConcurrentHashMapV8<K, V>
2798       *         or {@code null} if there was no mapping for the key
2799       * @throws NullPointerException if the specified key or value is null
2800       */
2801 <    @SuppressWarnings("unchecked")
1062 <    public V putIfAbsent(K key, V value) {
2801 >    @SuppressWarnings("unchecked") public V putIfAbsent(K key, V value) {
2802          if (key == null || value == null)
2803              throw new NullPointerException();
2804 <        return (V)internalPut(key, value, false);
2804 >        return (V)internalPutIfAbsent(key, value);
2805      }
2806  
2807      /**
# Line 1073 | Line 2812 | public class ConcurrentHashMapV8<K, V>
2812       * @param m mappings to be stored in this map
2813       */
2814      public void putAll(Map<? extends K, ? extends V> m) {
2815 <        if (m == null)
1077 <            throw new NullPointerException();
1078 <        /*
1079 <         * If uninitialized, try to adjust targetCapacity to
1080 <         * accommodate the given number of elements.
1081 <         */
1082 <        if (table == null) {
1083 <            int size = m.size();
1084 <            int cap = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1085 <                tableSizeFor(size + (size >>> 1) + 1);
1086 <            if (cap > targetCapacity)
1087 <                targetCapacity = cap;
1088 <        }
1089 <        for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
1090 <            put(e.getKey(), e.getValue());
2815 >        internalPutAll(m);
2816      }
2817  
2818      /**
2819       * If the specified key is not already associated with a value,
2820 <     * computes its value using the given mappingFunction, and if
2821 <     * non-null, enters it into the map.  This is equivalent to
2822 <     *  <pre> {@code
2820 >     * computes its value using the given mappingFunction and enters
2821 >     * it into the map unless null.  This is equivalent to
2822 >     * <pre> {@code
2823       * if (map.containsKey(key))
2824       *   return map.get(key);
2825 <     * value = mappingFunction.map(key);
2825 >     * value = mappingFunction.apply(key);
2826       * if (value != null)
2827       *   map.put(key, value);
2828       * return value;}</pre>
2829       *
2830 <     * except that the action is performed atomically.  Some attempted
2831 <     * update operations on this map by other threads may be blocked
2832 <     * while computation is in progress, so the computation should be
2833 <     * short and simple, and must not attempt to update any other
2834 <     * mappings of this Map. The most appropriate usage is to
2830 >     * except that the action is performed atomically.  If the
2831 >     * function returns {@code null} no mapping is recorded. If the
2832 >     * function itself throws an (unchecked) exception, the exception
2833 >     * is rethrown to its caller, and no mapping is recorded.  Some
2834 >     * attempted update operations on this map by other threads may be
2835 >     * blocked while computation is in progress, so the computation
2836 >     * should be short and simple, and must not attempt to update any
2837 >     * other mappings of this Map. The most appropriate usage is to
2838       * construct a new object serving as an initial mapped value, or
2839       * memoized result, as in:
2840 +     *
2841       *  <pre> {@code
2842 <     * map.computeIfAbsent(key, new MappingFunction<K, V>() {
2842 >     * map.computeIfAbsent(key, new Fun<K, V>() {
2843       *   public V map(K k) { return new Value(f(k)); }});}</pre>
2844       *
2845       * @param key key with which the specified value is to be associated
2846       * @param mappingFunction the function to compute a value
2847       * @return the current (existing or computed) value associated with
2848 <     *         the specified key, or {@code null} if the computation
1120 <     *         returned {@code null}.
2848 >     *         the specified key, or null if the computed value is null
2849       * @throws NullPointerException if the specified key or mappingFunction
2850 <     *         is null,
2850 >     *         is null
2851       * @throws IllegalStateException if the computation detectably
2852       *         attempts a recursive update to this map that would
2853 <     *         otherwise never complete.
2853 >     *         otherwise never complete
2854       * @throws RuntimeException or Error if the mappingFunction does so,
2855 <     *         in which case the mapping is left unestablished.
2855 >     *         in which case the mapping is left unestablished
2856       */
2857 <    public V computeIfAbsent(K key, MappingFunction<? super K, ? extends V> mappingFunction) {
2857 >    @SuppressWarnings("unchecked") public V computeIfAbsent
2858 >        (K key, Fun<? super K, ? extends V> mappingFunction) {
2859          if (key == null || mappingFunction == null)
2860              throw new NullPointerException();
2861 <        return internalCompute(key, mappingFunction, false);
2861 >        return (V)internalComputeIfAbsent(key, mappingFunction);
2862      }
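A hedged usage sketch of the memoization idiom described in the Javadoc above, written against this class's own Fun callback interface (assumed here to be the nested single-method interface with an apply(K) method, as used elsewhere in this file):

import jsr166e.ConcurrentHashMapV8;

public class MemoDemo {
    static final ConcurrentHashMapV8<Integer, String> cache =
        new ConcurrentHashMapV8<Integer, String>();

    static String describe(int n) {
        return cache.computeIfAbsent(n, new ConcurrentHashMapV8.Fun<Integer, String>() {
            public String apply(Integer k) {
                return "value-" + k;        // stand-in for an expensive computation
            }
        });
    }

    public static void main(String[] args) {
        System.out.println(describe(42));   // computed once
        System.out.println(describe(42));   // served from the map
    }
}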
2863  
2864      /**
2865 <     * Computes the value associated with the given key using the given
2866 <     * mappingFunction, and if non-null, enters it into the map.  This
1138 <     * is equivalent to
2865 >     * If the given key is present, computes a new mapping value given a key and
2866 >     * its current mapped value. This is equivalent to
2867       *  <pre> {@code
2868 <     * value = mappingFunction.map(key);
2869 <     * if (value != null)
2870 <     *   map.put(key, value);
2871 <     * else
2872 <     *   value = map.get(key);
2873 <     * return value;}</pre>
2868 >     *   if (map.containsKey(key)) {
2869 >     *     value = remappingFunction.apply(key, map.get(key));
2870 >     *     if (value != null)
2871 >     *       map.put(key, value);
2872 >     *     else
2873 >     *       map.remove(key);
2874 >     *   }
2875 >     * }</pre>
2876 >     *
2877 >     * except that the action is performed atomically.  If the
2878 >     * function returns {@code null}, the mapping is removed.  If the
2879 >     * function itself throws an (unchecked) exception, the exception
2880 >     * is rethrown to its caller, and the current mapping is left
2881 >     * unchanged.  Some attempted update operations on this map by
2882 >     * other threads may be blocked while computation is in progress,
2883 >     * so the computation should be short and simple, and must not
2884 >     * attempt to update any other mappings of this Map.
2886       *
2887 <     * except that the action is performed atomically.  Some attempted
2888 <     * update operations on this map by other threads may be blocked
2889 <     * while computation is in progress, so the computation should be
2890 <     * short and simple, and must not attempt to update any other
2891 <     * mappings of this Map.
2887 >     * @param key key with which the specified value is to be associated
2888 >     * @param remappingFunction the function to compute a value
2889 >     * @return the new value associated with the specified key, or null if none
2890 >     * @throws NullPointerException if the specified key or remappingFunction
2891 >     *         is null
2892 >     * @throws IllegalStateException if the computation detectably
2893 >     *         attempts a recursive update to this map that would
2894 >     *         otherwise never complete
2895 >     * @throws RuntimeException or Error if the remappingFunction does so,
2896 >     *         in which case the mapping is unchanged
2897 >     */
2898 >    @SuppressWarnings("unchecked") public V computeIfPresent
2899 >        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
2900 >        if (key == null || remappingFunction == null)
2901 >            throw new NullPointerException();
2902 >        return (V)internalCompute(key, true, remappingFunction);
2903 >    }
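A hedged usage sketch for computeIfPresent: decrement a per-key count and drop the mapping once it reaches zero (returning null removes the entry). BiFun is assumed to be this class's two-argument callback interface with an apply(K, V) method.

import jsr166e.ConcurrentHashMapV8;

public class DecrementDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String, Integer> counts =
            new ConcurrentHashMapV8<String, Integer>();
        counts.put("x", 1);
        counts.computeIfPresent("x",
            new ConcurrentHashMapV8.BiFun<String, Integer, Integer>() {
                public Integer apply(String k, Integer v) {
                    if (v > 1)
                        return v - 1;       // keep the decremented count
                    return null;            // null result => mapping removed
                }
            });
        System.out.println(counts.containsKey("x"));  // false: count hit zero
    }
}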
2904 >
2905 >    /**
2906 >     * Computes a new mapping value given a key and
2907 >     * its current mapped value (or {@code null} if there is no current
2908 >     * mapping). This is equivalent to
2909 >     *  <pre> {@code
2910 >     *   value = remappingFunction.apply(key, map.get(key));
2911 >     *   if (value != null)
2912 >     *     map.put(key, value);
2913 >     *   else
2914 >     *     map.remove(key);
2915 >     * }</pre>
2916 >     *
2917 >     * except that the action is performed atomically.  If the
2918 >     * function returns {@code null}, the mapping is removed.  If the
2919 >     * function itself throws an (unchecked) exception, the exception
2920 >     * is rethrown to its caller, and the current mapping is left
2921 >     * unchanged.  Some attempted update operations on this map by
2922 >     * other threads may be blocked while computation is in progress,
2923 >     * so the computation should be short and simple, and must not
2924 >     * attempt to update any other mappings of this Map. For example,
2925 >     * to either create or append new messages to a value mapping:
2926 >     *
2927 >     * <pre> {@code
2928 >     * Map<Key, String> map = ...;
2929 >     * final String msg = ...;
2930 >     * map.compute(key, new BiFun<Key, String, String>() {
2931 >     *   public String apply(Key k, String v) {
2932 >     *    return (v == null) ? msg : v + msg;});}}</pre>
2933       *
2934       * @param key key with which the specified value is to be associated
2935 <     * @param mappingFunction the function to compute a value
2936 <     * @return the current value associated with
2937 <     *         the specified key, or {@code null} if the computation
2938 <     *         returned {@code null} and the value was not otherwise present.
1158 <     * @throws NullPointerException if the specified key or mappingFunction
1159 <     *         is null,
2935 >     * @param remappingFunction the function to compute a value
2936 >     * @return the new value associated with the specified key, or null if none
2937 >     * @throws NullPointerException if the specified key or remappingFunction
2938 >     *         is null
2939       * @throws IllegalStateException if the computation detectably
2940       *         attempts a recursive update to this map that would
2941 <     *         otherwise never complete.
2942 <     * @throws RuntimeException or Error if the mappingFunction does so,
2943 <     *         in which case the mapping is unchanged.
2944 <     */
2945 <    public V compute(K key, MappingFunction<? super K, ? extends V> mappingFunction) {
2946 <        if (key == null || mappingFunction == null)
2941 >     *         otherwise never complete
2942 >     * @throws RuntimeException or Error if the remappingFunction does so,
2943 >     *         in which case the mapping is unchanged
2944 >     */
2945 >    @SuppressWarnings("unchecked") public V compute
2946 >        (K key, BiFun<? super K, ? super V, ? extends V> remappingFunction) {
2947 >        if (key == null || remappingFunction == null)
2948 >            throw new NullPointerException();
2949 >        return (V)internalCompute(key, false, remappingFunction);
2950 >    }
2951 >
2952 >    /**
2953 >     * If the specified key is not already associated
2954 >     * with a value, associate it with the given value.
2955 >     * Otherwise, replace the value with the results of
2956 >     * the given remapping function. This is equivalent to:
2957 >     *  <pre> {@code
2958 >     *   if (!map.containsKey(key))
2959 >     *     map.put(key, value);
2960 >     *   else {
2961 >     *     newValue = remappingFunction.apply(map.get(key), value);
2962 >     *     if (newValue != null)
2963 >     *       map.put(key, newValue);
2964 >     *     else
2965 >     *       map.remove(key);
2966 >     *   }
2967 >     * }</pre>
2968 >     * except that the action is performed atomically.  If the
2969 >     * function returns {@code null}, the mapping is removed.  If the
2970 >     * function itself throws an (unchecked) exception, the exception
2971 >     * is rethrown to its caller, and the current mapping is left
2972 >     * unchanged.  Some attempted update operations on this map by
2973 >     * other threads may be blocked while computation is in progress,
2974 >     * so the computation should be short and simple, and must not
2975 >     * attempt to update any other mappings of this Map.
2976 >     */
2977 >    @SuppressWarnings("unchecked") public V merge
2978 >        (K key, V value, BiFun<? super V, ? super V, ? extends V> remappingFunction) {
2979 >        if (key == null || value == null || remappingFunction == null)
2980              throw new NullPointerException();
2981 <        return internalCompute(key, mappingFunction, true);
2981 >        return (V)internalMerge(key, value, remappingFunction);
2982      }
2983  
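A hedged sketch of merge used as an atomic counter update (assumes the same
jsr166e import as above; the map and key names are illustrative):

    ConcurrentHashMapV8<String, Integer> counts =
        new ConcurrentHashMapV8<String, Integer>();
    // Insert 1 if absent, otherwise add 1 to the existing count, atomically.
    counts.merge("widget", 1,
        new ConcurrentHashMapV8.BiFun<Integer, Integer, Integer>() {
            public Integer apply(Integer old, Integer one) {
                return old + one;
            }
        });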
2984      /**
# Line 1178 | Line 2990 | public class ConcurrentHashMapV8<K, V>
2990       *         {@code null} if there was no mapping for {@code key}
2991       * @throws NullPointerException if the specified key is null
2992       */
2993 <    @SuppressWarnings("unchecked")
1182 <    public V remove(Object key) {
2993 >    @SuppressWarnings("unchecked") public V remove(Object key) {
2994          if (key == null)
2995              throw new NullPointerException();
2996          return (V)internalReplace(key, null, null);
# Line 1216 | Line 3027 | public class ConcurrentHashMapV8<K, V>
3027       *         or {@code null} if there was no mapping for the key
3028       * @throws NullPointerException if the specified key or value is null
3029       */
3030 <    @SuppressWarnings("unchecked")
1220 <    public V replace(K key, V value) {
3030 >    @SuppressWarnings("unchecked") public V replace(K key, V value) {
3031          if (key == null || value == null)
3032              throw new NullPointerException();
3033          return (V)internalReplace(key, value, null);
# Line 1233 | Line 3043 | public class ConcurrentHashMapV8<K, V>
3043      /**
3044       * Returns a {@link Set} view of the keys contained in this map.
3045       * The set is backed by the map, so changes to the map are
3046 <     * reflected in the set, and vice-versa.  The set supports element
1237 <     * removal, which removes the corresponding mapping from this map,
1238 <     * via the {@code Iterator.remove}, {@code Set.remove},
1239 <     * {@code removeAll}, {@code retainAll}, and {@code clear}
1240 <     * operations.  It does not support the {@code add} or
1241 <     * {@code addAll} operations.
3046 >     * reflected in the set, and vice-versa.
3047       *
3048 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1244 <     * that will never throw {@link ConcurrentModificationException},
1245 <     * and guarantees to traverse elements as they existed upon
1246 <     * construction of the iterator, and may (but is not guaranteed to)
1247 <     * reflect any modifications subsequent to construction.
3048 >     * @return the set view
3049       */
3050 <    public Set<K> keySet() {
3051 <        KeySet<K,V> ks = keySet;
3052 <        return (ks != null) ? ks : (keySet = new KeySet<K,V>(this));
3050 >    public KeySetView<K,V> keySet() {
3051 >        KeySetView<K,V> ks = keySet;
3052 >        return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
3053 >    }
3054 >
3055 >    /**
3056 >     * Returns a {@link Set} view of the keys in this map, using the
3057 >     * given common mapped value for any additions (i.e., {@link
3058 >     * Collection#add} and {@link Collection#addAll}). This is of
3059 >     * course only appropriate if it is acceptable to use the same
3060 >     * value for all additions from this view.
3061 >     *
3062 >     * @param mappedValue the mapped value to use for any
3063 >     * additions.
3064 >     * @return the set view
3065 >     * @throws NullPointerException if the mappedValue is null
3066 >     */
3067 >    public KeySetView<K,V> keySet(V mappedValue) {
3068 >        if (mappedValue == null)
3069 >            throw new NullPointerException();
3070 >        return new KeySetView<K,V>(this, mappedValue);
3071      }
3072  
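For illustration, a minimal sketch of the keySet(mappedValue) view used as a
concurrent set (names are made up; assumes jsr166e on the classpath):

    ConcurrentHashMapV8<String, Boolean> backing =
        new ConcurrentHashMapV8<String, Boolean>();
    // Additions through the view map each new key to Boolean.TRUE.
    ConcurrentHashMapV8.KeySetView<String, Boolean> set =
        backing.keySet(Boolean.TRUE);
    set.add("alpha");                 // behaves like putIfAbsent("alpha", TRUE)
    boolean present = set.contains("alpha");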
3073      /**
3074       * Returns a {@link Collection} view of the values contained in this map.
3075       * The collection is backed by the map, so changes to the map are
3076 <     * reflected in the collection, and vice-versa.  The collection
1258 <     * supports element removal, which removes the corresponding
1259 <     * mapping from this map, via the {@code Iterator.remove},
1260 <     * {@code Collection.remove}, {@code removeAll},
1261 <     * {@code retainAll}, and {@code clear} operations.  It does not
1262 <     * support the {@code add} or {@code addAll} operations.
1263 <     *
1264 <     * <p>The view's {@code iterator} is a "weakly consistent" iterator
1265 <     * that will never throw {@link ConcurrentModificationException},
1266 <     * and guarantees to traverse elements as they existed upon
1267 <     * construction of the iterator, and may (but is not guaranteed to)
1268 <     * reflect any modifications subsequent to construction.
3076 >     * reflected in the collection, and vice-versa.
3077       */
3078 <    public Collection<V> values() {
3079 <        Values<K,V> vs = values;
3080 <        return (vs != null) ? vs : (values = new Values<K,V>(this));
3078 >    public ValuesView<K,V> values() {
3079 >        ValuesView<K,V> vs = values;
3080 >        return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
3081      }
3082  
3083      /**
# Line 1289 | Line 3097 | public class ConcurrentHashMapV8<K, V>
3097       * reflect any modifications subsequent to construction.
3098       */
3099      public Set<Map.Entry<K,V>> entrySet() {
3100 <        EntrySet<K,V> es = entrySet;
3101 <        return (es != null) ? es : (entrySet = new EntrySet<K,V>(this));
3100 >        EntrySetView<K,V> es = entrySet;
3101 >        return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
3102      }
3103  
3104      /**
# Line 1314 | Line 3122 | public class ConcurrentHashMapV8<K, V>
3122      }
3123  
3124      /**
3125 +     * Returns a partitionable iterator of the keys in this map.
3126 +     *
3127 +     * @return a partitionable iterator of the keys in this map
3128 +     */
3129 +    public Spliterator<K> keySpliterator() {
3130 +        return new KeyIterator<K,V>(this);
3131 +    }
3132 +
3133 +    /**
3134 +     * Returns a partitionable iterator of the values in this map.
3135 +     *
3136 +     * @return a partitionable iterator of the values in this map
3137 +     */
3138 +    public Spliterator<V> valueSpliterator() {
3139 +        return new ValueIterator<K,V>(this);
3140 +    }
3141 +
3142 +    /**
3143 +     * Returns a partitionable iterator of the entries in this map.
3144 +     *
3145 +     * @return a partitionable iterator of the entries in this map
3146 +     */
3147 +    public Spliterator<Map.Entry<K,V>> entrySpliterator() {
3148 +        return new EntryIterator<K,V>(this);
3149 +    }
3150 +
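A hedged sketch of partitioned traversal, assuming (as in this revision) that
the nested Spliterator interface extends Iterator and that split() is invoked
before any element has been consumed:

    ConcurrentHashMapV8<String, Integer> map =
        new ConcurrentHashMapV8<String, Integer>();
    map.put("a", 1); map.put("b", 2); map.put("c", 3);
    // Partition the key traversal; each part could be handed to its own worker.
    ConcurrentHashMapV8.Spliterator<String> rest = map.keySpliterator();
    ConcurrentHashMapV8.Spliterator<String> half = rest.split();
    while (half.hasNext()) System.out.println("half: " + half.next());
    while (rest.hasNext()) System.out.println("rest: " + rest.next());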
3151 +    /**
3152       * Returns the hash code value for this {@link Map}, i.e.,
3153       * the sum of, for each key-value pair in the map,
3154       * {@code key.hashCode() ^ value.hashCode()}.
# Line 1322 | Line 3157 | public class ConcurrentHashMapV8<K, V>
3157       */
3158      public int hashCode() {
3159          int h = 0;
3160 <        InternalIterator it = new InternalIterator(table);
3161 <        while (it.next != null) {
3162 <            h += it.nextKey.hashCode() ^ it.nextVal.hashCode();
3163 <            it.advance();
3160 >        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3161 >        Object v;
3162 >        while ((v = it.advance()) != null) {
3163 >            h += it.nextKey.hashCode() ^ v.hashCode();
3164          }
3165          return h;
3166      }
# Line 1342 | Line 3177 | public class ConcurrentHashMapV8<K, V>
3177       * @return a string representation of this map
3178       */
3179      public String toString() {
3180 <        InternalIterator it = new InternalIterator(table);
3180 >        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3181          StringBuilder sb = new StringBuilder();
3182          sb.append('{');
3183 <        if (it.next != null) {
3183 >        Object v;
3184 >        if ((v = it.advance()) != null) {
3185              for (;;) {
3186 <                Object k = it.nextKey, v = it.nextVal;
3186 >                Object k = it.nextKey;
3187                  sb.append(k == this ? "(this Map)" : k);
3188                  sb.append('=');
3189                  sb.append(v == this ? "(this Map)" : v);
3190 <                it.advance();
1355 <                if (it.next == null)
3190 >                if ((v = it.advance()) == null)
3191                      break;
3192                  sb.append(',').append(' ');
3193              }
# Line 1375 | Line 3210 | public class ConcurrentHashMapV8<K, V>
3210              if (!(o instanceof Map))
3211                  return false;
3212              Map<?,?> m = (Map<?,?>) o;
3213 <            InternalIterator it = new InternalIterator(table);
3214 <            while (it.next != null) {
3215 <                Object val = it.nextVal;
3213 >            Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3214 >            Object val;
3215 >            while ((val = it.advance()) != null) {
3216                  Object v = m.get(it.nextKey);
3217                  if (v == null || (v != val && !v.equals(val)))
3218                      return false;
1384                it.advance();
3219              }
3220              for (Map.Entry<?,?> e : m.entrySet()) {
3221                  Object mk, mv, v;
# Line 1397 | Line 3231 | public class ConcurrentHashMapV8<K, V>
3231  
3232      /* ----------------Iterators -------------- */
3233  
3234 <    /**
3235 <     * Base class for key, value, and entry iterators.  Adds a map
3236 <     * reference to InternalIterator to support Iterator.remove.
3237 <     */
3238 <    static abstract class ViewIterator<K,V> extends InternalIterator {
1405 <        final ConcurrentHashMapV8<K, V> map;
1406 <        ViewIterator(ConcurrentHashMapV8<K, V> map) {
1407 <            super(map.table);
1408 <            this.map = map;
3234 >    @SuppressWarnings("serial") static final class KeyIterator<K,V> extends Traverser<K,V,Object>
3235 >        implements Spliterator<K>, Enumeration<K> {
3236 >        KeyIterator(ConcurrentHashMapV8<K, V> map) { super(map); }
3237 >        KeyIterator(ConcurrentHashMapV8<K, V> map, Traverser<K,V,Object> it) {
3238 >            super(map, it, -1);
3239          }
3240 <
3241 <        public final void remove() {
1412 <            if (last == null)
3240 >        public KeyIterator<K,V> split() {
3241 >            if (nextKey != null)
3242                  throw new IllegalStateException();
3243 <            map.remove(last.key);
1415 <            last = null;
3243 >            return new KeyIterator<K,V>(map, this);
3244          }
3245 <
3246 <        public final boolean hasNext()         { return next != null; }
1419 <        public final boolean hasMoreElements() { return next != null; }
1420 <    }
1421 <
1422 <    static final class KeyIterator<K,V> extends ViewIterator<K,V>
1423 <        implements Iterator<K>, Enumeration<K> {
1424 <        KeyIterator(ConcurrentHashMapV8<K, V> map) { super(map); }
1425 <
1426 <        @SuppressWarnings("unchecked")
1427 <        public final K next() {
1428 <            if (next == null)
3245 >        @SuppressWarnings("unchecked") public final K next() {
3246 >            if (nextVal == null && advance() == null)
3247                  throw new NoSuchElementException();
3248              Object k = nextKey;
3249 <            advance();
3250 <            return (K)k;
3249 >            nextVal = null;
3250 >            return (K) k;
3251          }
3252  
3253          public final K nextElement() { return next(); }
3254      }
3255  
3256 <    static final class ValueIterator<K,V> extends ViewIterator<K,V>
3257 <        implements Iterator<V>, Enumeration<V> {
3256 >    @SuppressWarnings("serial") static final class ValueIterator<K,V> extends Traverser<K,V,Object>
3257 >        implements Spliterator<V>, Enumeration<V> {
3258          ValueIterator(ConcurrentHashMapV8<K, V> map) { super(map); }
3259 +        ValueIterator(ConcurrentHashMapV8<K, V> map, Traverser<K,V,Object> it) {
3260 +            super(map, it, -1);
3261 +        }
3262 +        public ValueIterator<K,V> split() {
3263 +            if (nextKey != null)
3264 +                throw new IllegalStateException();
3265 +            return new ValueIterator<K,V>(map, this);
3266 +        }
3267  
3268 <        @SuppressWarnings("unchecked")
3269 <        public final V next() {
3270 <            if (next == null)
3268 >        @SuppressWarnings("unchecked") public final V next() {
3269 >            Object v;
3270 >            if ((v = nextVal) == null && (v = advance()) == null)
3271                  throw new NoSuchElementException();
3272 <            Object v = nextVal;
3273 <            advance();
1448 <            return (V)v;
3272 >            nextVal = null;
3273 >            return (V) v;
3274          }
3275  
3276          public final V nextElement() { return next(); }
3277      }
3278  
3279 <    static final class EntryIterator<K,V> extends ViewIterator<K,V>
3280 <        implements Iterator<Map.Entry<K,V>> {
3279 >    @SuppressWarnings("serial") static final class EntryIterator<K,V> extends Traverser<K,V,Object>
3280 >        implements Spliterator<Map.Entry<K,V>> {
3281          EntryIterator(ConcurrentHashMapV8<K, V> map) { super(map); }
3282 +        EntryIterator(ConcurrentHashMapV8<K, V> map, Traverser<K,V,Object> it) {
3283 +            super(map, it, -1);
3284 +        }
3285 +        public EntryIterator<K,V> split() {
3286 +            if (nextKey != null)
3287 +                throw new IllegalStateException();
3288 +            return new EntryIterator<K,V>(map, this);
3289 +        }
3290  
3291 <        @SuppressWarnings("unchecked")
3292 <        public final Map.Entry<K,V> next() {
3293 <            if (next == null)
3291 >        @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
3292 >            Object v;
3293 >            if ((v = nextVal) == null && (v = advance()) == null)
3294                  throw new NoSuchElementException();
3295              Object k = nextKey;
3296 <            Object v = nextVal;
3297 <            advance();
1465 <            return new WriteThroughEntry<K,V>(map, (K)k, (V)v);
3296 >            nextVal = null;
3297 >            return new MapEntry<K,V>((K)k, (V)v, map);
3298          }
3299      }
3300  
3301      /**
3302 <     * Custom Entry class used by EntryIterator.next(), that relays
1471 <     * setValue changes to the underlying map.
3302 >     * Exported Entry for iterators
3303       */
3304 <    static final class WriteThroughEntry<K,V> implements Map.Entry<K, V> {
1474 <        final ConcurrentHashMapV8<K, V> map;
3304 >    static final class MapEntry<K,V> implements Map.Entry<K, V> {
3305          final K key; // non-null
3306          V val;       // non-null
3307 <        WriteThroughEntry(ConcurrentHashMapV8<K, V> map, K key, V val) {
3308 <            this.map = map; this.key = key; this.val = val;
3307 >        final ConcurrentHashMapV8<K, V> map;
3308 >        MapEntry(K key, V val, ConcurrentHashMapV8<K, V> map) {
3309 >            this.key = key;
3310 >            this.val = val;
3311 >            this.map = map;
3312          }
1480
3313          public final K getKey()       { return key; }
3314          public final V getValue()     { return val; }
3315          public final int hashCode()   { return key.hashCode() ^ val.hashCode(); }
# Line 1494 | Line 3326 | public class ConcurrentHashMapV8<K, V>
3326  
3327          /**
3328           * Sets our entry's value and writes through to the map. The
3329 <         * value to return is somewhat arbitrary here. Since a
3330 <         * WriteThroughEntry does not necessarily track asynchronous
3331 <         * changes, the most recent "previous" value could be
3332 <         * different from what we return (or could even have been
3333 <         * removed in which case the put will re-establish). We do not
1502 <         * and cannot guarantee more.
3329 >         * value to return is somewhat arbitrary here. Since we do not
3330 >         * necessarily track asynchronous changes, the most recent
3331 >         * "previous" value could be different from what we return (or
3332 >         * could even have been removed in which case the put will
3333 >         * re-establish). We do not and cannot guarantee more.
3334           */
3335          public final V setValue(V value) {
3336              if (value == null) throw new NullPointerException();
# Line 1510 | Line 3341 | public class ConcurrentHashMapV8<K, V>
3341          }
3342      }
3343  
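For illustration, a small sketch of the write-through behavior of entries
returned by the entry iterator (assumes import java.util.Map in addition to
the jsr166e import; names are illustrative):

    ConcurrentHashMapV8<String, Integer> map =
        new ConcurrentHashMapV8<String, Integer>();
    map.put("a", 1);
    for (Map.Entry<String, Integer> e : map.entrySet()) {
        if ("a".equals(e.getKey()))
            e.setValue(2);            // writes through, i.e. map.put("a", 2)
    }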
3344 <    /* ----------------Views -------------- */
3345 <
3346 <    /*
1516 <     * These currently just extend java.util.AbstractX classes, but
1517 <     * may need a new custom base to support partitioned traversal.
3344 >    /**
3345 >     * Returns exportable snapshot entry for the given key and value
3346 >     * when write-through can't or shouldn't be used.
3347       */
3348 <
3349 <    static final class KeySet<K,V> extends AbstractSet<K> {
1521 <        final ConcurrentHashMapV8<K, V> map;
1522 <        KeySet(ConcurrentHashMapV8<K, V> map)   { this.map = map; }
1523 <
1524 <        public final int size()                 { return map.size(); }
1525 <        public final boolean isEmpty()          { return map.isEmpty(); }
1526 <        public final void clear()               { map.clear(); }
1527 <        public final boolean contains(Object o) { return map.containsKey(o); }
1528 <        public final boolean remove(Object o)   { return map.remove(o) != null; }
1529 <        public final Iterator<K> iterator() {
1530 <            return new KeyIterator<K,V>(map);
1531 <        }
1532 <    }
1533 <
1534 <    static final class Values<K,V> extends AbstractCollection<V> {
1535 <        final ConcurrentHashMapV8<K, V> map;
1536 <        Values(ConcurrentHashMapV8<K, V> map)   { this.map = map; }
1537 <
1538 <        public final int size()                 { return map.size(); }
1539 <        public final boolean isEmpty()          { return map.isEmpty(); }
1540 <        public final void clear()               { map.clear(); }
1541 <        public final boolean contains(Object o) { return map.containsValue(o); }
1542 <        public final Iterator<V> iterator() {
1543 <            return new ValueIterator<K,V>(map);
1544 <        }
1545 <    }
1546 <
1547 <    static final class EntrySet<K,V> extends AbstractSet<Map.Entry<K,V>> {
1548 <        final ConcurrentHashMapV8<K, V> map;
1549 <        EntrySet(ConcurrentHashMapV8<K, V> map) { this.map = map; }
1550 <
1551 <        public final int size()                 { return map.size(); }
1552 <        public final boolean isEmpty()          { return map.isEmpty(); }
1553 <        public final void clear()               { map.clear(); }
1554 <        public final Iterator<Map.Entry<K,V>> iterator() {
1555 <            return new EntryIterator<K,V>(map);
1556 <        }
1557 <
1558 <        public final boolean contains(Object o) {
1559 <            Object k, v, r; Map.Entry<?,?> e;
1560 <            return ((o instanceof Map.Entry) &&
1561 <                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
1562 <                    (r = map.get(k)) != null &&
1563 <                    (v = e.getValue()) != null &&
1564 <                    (v == r || v.equals(r)));
1565 <        }
1566 <
1567 <        public final boolean remove(Object o) {
1568 <            Object k, v; Map.Entry<?,?> e;
1569 <            return ((o instanceof Map.Entry) &&
1570 <                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
1571 <                    (v = e.getValue()) != null &&
1572 <                    map.remove(k, v));
1573 <        }
3348 >    static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3349 >        return new AbstractMap.SimpleEntry<K,V>(k, v);
3350      }
3351  
3352      /* ---------------- Serialization Support -------------- */
# Line 1594 | Line 3370 | public class ConcurrentHashMapV8<K, V>
3370       * for each key-value mapping, followed by a null pair.
3371       * The key-value mappings are emitted in no particular order.
3372       */
3373 <    @SuppressWarnings("unchecked")
3374 <    private void writeObject(java.io.ObjectOutputStream s)
1599 <            throws java.io.IOException {
3373 >    @SuppressWarnings("unchecked") private void writeObject(java.io.ObjectOutputStream s)
3374 >        throws java.io.IOException {
3375          if (segments == null) { // for serialization compatibility
3376              segments = (Segment<K,V>[])
3377                  new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
# Line 1604 | Line 3379 | public class ConcurrentHashMapV8<K, V>
3379                  segments[i] = new Segment<K,V>(LOAD_FACTOR);
3380          }
3381          s.defaultWriteObject();
3382 <        InternalIterator it = new InternalIterator(table);
3383 <        while (it.next != null) {
3382 >        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3383 >        Object v;
3384 >        while ((v = it.advance()) != null) {
3385              s.writeObject(it.nextKey);
3386 <            s.writeObject(it.nextVal);
1611 <            it.advance();
3386 >            s.writeObject(v);
3387          }
3388          s.writeObject(null);
3389          s.writeObject(null);
# Line 1619 | Line 3394 | public class ConcurrentHashMapV8<K, V>
3394       * Reconstitutes the instance from a stream (that is, deserializes it).
3395       * @param s the stream
3396       */
3397 <    @SuppressWarnings("unchecked")
3398 <    private void readObject(java.io.ObjectInputStream s)
1624 <            throws java.io.IOException, ClassNotFoundException {
3397 >    @SuppressWarnings("unchecked") private void readObject(java.io.ObjectInputStream s)
3398 >        throws java.io.IOException, ClassNotFoundException {
3399          s.defaultReadObject();
3400          this.segments = null; // unneeded
3401 <        // initalize transient final field
3401 >        // initialize transient final field
3402          UNSAFE.putObjectVolatile(this, counterOffset, new LongAdder());
1629        this.targetCapacity = DEFAULT_CAPACITY;
3403  
3404          // Create all nodes, then place in table once size is known
3405          long size = 0L;
# Line 1635 | Line 3408 | public class ConcurrentHashMapV8<K, V>
3408              K k = (K) s.readObject();
3409              V v = (V) s.readObject();
3410              if (k != null && v != null) {
3411 <                p = new Node(spread(k.hashCode()), k, v, p);
3411 >                int h = spread(k.hashCode());
3412 >                p = new Node(h, k, v, p);
3413                  ++size;
3414              }
3415              else
# Line 1643 | Line 3417 | public class ConcurrentHashMapV8<K, V>
3417          }
3418          if (p != null) {
3419              boolean init = false;
3420 <            if (resizing == 0 &&
3421 <                UNSAFE.compareAndSwapInt(this, resizingOffset, 0, 1)) {
3420 >            int n;
3421 >            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3422 >                n = MAXIMUM_CAPACITY;
3423 >            else {
3424 >                int sz = (int)size;
3425 >                n = tableSizeFor(sz + (sz >>> 1) + 1);
3426 >            }
3427 >            int sc = sizeCtl;
3428 >            boolean collide = false;
3429 >            if (n > sc &&
3430 >                UNSAFE.compareAndSwapInt(this, sizeCtlOffset, sc, -1)) {
3431                  try {
3432                      if (table == null) {
3433                          init = true;
1651                        int n;
1652                        if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
1653                            n = MAXIMUM_CAPACITY;
1654                        else {
1655                            int sz = (int)size;
1656                            n = tableSizeFor(sz + (sz >>> 1) + 1);
1657                        }
1658                        threshold = n - (n >>> 2) - THRESHOLD_OFFSET;
3434                          Node[] tab = new Node[n];
3435                          int mask = n - 1;
3436                          while (p != null) {
3437                              int j = p.hash & mask;
3438                              Node next = p.next;
3439 <                            p.next = tabAt(tab, j);
3439 >                            Node q = p.next = tabAt(tab, j);
3440                              setTabAt(tab, j, p);
3441 +                            if (!collide && q != null && q.hash == p.hash)
3442 +                                collide = true;
3443                              p = next;
3444                          }
3445                          table = tab;
3446                          counter.add(size);
3447 +                        sc = n - (n >>> 2);
3448                      }
3449                  } finally {
3450 <                    resizing = 0;
3450 >                    sizeCtl = sc;
3451 >                }
3452 >                if (collide) { // rescan and convert to TreeBins
3453 >                    Node[] tab = table;
3454 >                    for (int i = 0; i < tab.length; ++i) {
3455 >                        int c = 0;
3456 >                        for (Node e = tabAt(tab, i); e != null; e = e.next) {
3457 >                            if (++c > TREE_THRESHOLD &&
3458 >                                (e.key instanceof Comparable)) {
3459 >                                replaceWithTreeBin(tab, i, e.key);
3460 >                                break;
3461 >                            }
3462 >                        }
3463 >                    }
3464                  }
3465              }
3466              if (!init) { // Can only happen if unsafely published.
3467                  while (p != null) {
3468 <                    internalPut(p.key, p.val, true);
3468 >                    internalPut(p.key, p.val);
3469                      p = p.next;
3470                  }
3471              }
3472          }
3473      }
3474  
3475 +
3476 +    // -------------------------------------------------------
3477 +
3478 +    // SAMs (single-abstract-method interfaces)
3479 +    /** Interface describing a void action of one argument */
3480 +    public interface Action<A> { void apply(A a); }
3481 +    /** Interface describing a void action of two arguments */
3482 +    public interface BiAction<A,B> { void apply(A a, B b); }
3483 +    /** Interface describing a function of one argument */
3484 +    public interface Fun<A,T> { T apply(A a); }
3485 +    /** Interface describing a function of two arguments */
3486 +    public interface BiFun<A,B,T> { T apply(A a, B b); }
3487 +    /** Interface describing a function of no arguments */
3488 +    public interface Generator<T> { T apply(); }
3489 +    /** Interface describing a function mapping its argument to a double */
3490 +    public interface ObjectToDouble<A> { double apply(A a); }
3491 +    /** Interface describing a function mapping its argument to a long */
3492 +    public interface ObjectToLong<A> { long apply(A a); }
3493 +    /** Interface describing a function mapping its argument to an int */
3494 +    public interface ObjectToInt<A> {int apply(A a); }
3495 +    /** Interface describing a function mapping two arguments to a double */
3496 +    public interface ObjectByObjectToDouble<A,B> { double apply(A a, B b); }
3497 +    /** Interface describing a function mapping two arguments to a long */
3498 +    public interface ObjectByObjectToLong<A,B> { long apply(A a, B b); }
3499 +    /** Interface describing a function mapping two arguments to an int */
3500 +    public interface ObjectByObjectToInt<A,B> {int apply(A a, B b); }
3501 +    /** Interface describing a function mapping a double to a double */
3502 +    public interface DoubleToDouble { double apply(double a); }
3503 +    /** Interface describing a function mapping a long to a long */
3504 +    public interface LongToLong { long apply(long a); }
3505 +    /** Interface describing a function mapping an int to an int */
3506 +    public interface IntToInt { int apply(int a); }
3507 +    /** Interface describing a function mapping two doubles to a double */
3508 +    public interface DoubleByDoubleToDouble { double apply(double a, double b); }
3509 +    /** Interface describing a function mapping two longs to a long */
3510 +    public interface LongByLongToLong { long apply(long a, long b); }
3511 +    /** Interface describing a function mapping two ints to an int */
3512 +    public interface IntByIntToInt { int apply(int a, int b); }
3513 +
3514 +
3515 +    // -------------------------------------------------------
3516 +
3517 +    /**
3518 +     * Performs the given action for each (key, value).
3519 +     *
3520 +     * @param action the action
3521 +     */
3522 +    public void forEach(BiAction<K,V> action) {
3523 +        ForkJoinTasks.forEach
3524 +            (this, action).invoke();
3525 +    }
3526 +
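A minimal sketch of the bulk forEach operation (names are made up; the action
may be applied in parallel, so it should not depend on encounter order):

    ConcurrentHashMapV8<String, Integer> scores =
        new ConcurrentHashMapV8<String, Integer>();
    scores.put("alice", 3); scores.put("bob", 5);
    scores.forEach(new ConcurrentHashMapV8.BiAction<String, Integer>() {
        public void apply(String k, Integer v) {
            System.out.println(k + " -> " + v);
        }
    });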
3527 +    /**
3528 +     * Performs the given action for each non-null transformation
3529 +     * of each (key, value).
3530 +     *
3531 +     * @param transformer a function returning the transformation
3532 +     * for an element, or null if there is no transformation (in
3533 +     * which case the action is not applied).
3534 +     * @param action the action
3535 +     */
3536 +    public <U> void forEach(BiFun<? super K, ? super V, ? extends U> transformer,
3537 +                            Action<U> action) {
3538 +        ForkJoinTasks.forEach
3539 +            (this, transformer, action).invoke();
3540 +    }
3541 +
3542 +    /**
3543 +     * Returns a non-null result from applying the given search
3544 +     * function on each (key, value), or null if none.  Upon
3545 +     * success, further element processing is suppressed and the
3546 +     * results of any other parallel invocations of the search
3547 +     * function are ignored.
3548 +     *
3549 +     * @param searchFunction a function returning a non-null
3550 +     * result on success, else null
3551 +     * @return a non-null result from applying the given search
3552 +     * function on each (key, value), or null if none
3553 +     */
3554 +    public <U> U search(BiFun<? super K, ? super V, ? extends U> searchFunction) {
3555 +        return ForkJoinTasks.search
3556 +            (this, searchFunction).invoke();
3557 +    }
3558 +
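For illustration, a hedged sketch of search returning some key whose value
exceeds a threshold, or null if no entry matches (names are made up):

    ConcurrentHashMapV8<String, Integer> scores =
        new ConcurrentHashMapV8<String, Integer>();
    scores.put("alice", 3); scores.put("bob", 5);
    String hit = scores.search(
        new ConcurrentHashMapV8.BiFun<String, Integer, String>() {
            public String apply(String k, Integer v) {
                return (v > 4) ? k : null;   // a non-null result stops the search
            }
        });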
3559 +    /**
3560 +     * Returns the result of accumulating the given transformation
3561 +     * of all (key, value) pairs using the given reducer to
3562 +     * combine values, or null if none.
3563 +     *
3564 +     * @param transformer a function returning the transformation
3565 +     * for an element, or null if there is no transformation (in
3566 +     * which case it is not combined).
3567 +     * @param reducer a commutative associative combining function
3568 +     * @return the result of accumulating the given transformation
3569 +     * of all (key, value) pairs
3570 +     */
3571 +    public <U> U reduce(BiFun<? super K, ? super V, ? extends U> transformer,
3572 +                        BiFun<? super U, ? super U, ? extends U> reducer) {
3573 +        return ForkJoinTasks.reduce
3574 +            (this, transformer, reducer).invoke();
3575 +    }
3576 +
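A minimal sketch of reduce: transform each (key, value) to its value and sum
the results (the reducer must be commutative and associative; names are made
up):

    ConcurrentHashMapV8<String, Integer> m =
        new ConcurrentHashMapV8<String, Integer>();
    m.put("a", 1); m.put("b", 2);
    Integer sum = m.reduce(
        new ConcurrentHashMapV8.BiFun<String, Integer, Integer>() {
            public Integer apply(String k, Integer v) { return v; }
        },
        new ConcurrentHashMapV8.BiFun<Integer, Integer, Integer>() {
            public Integer apply(Integer a, Integer b) { return a + b; }
        });   // null if the map is empty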
3577 +    /**
3578 +     * Returns the result of accumulating the given transformation
3579 +     * of all (key, value) pairs using the given reducer to
3580 +     * combine values, and the given basis as an identity value.
3581 +     *
3582 +     * @param transformer a function returning the transformation
3583 +     * for an element
3584 +     * @param basis the identity (initial default value) for the reduction
3585 +     * @param reducer a commutative associative combining function
3586 +     * @return the result of accumulating the given transformation
3587 +     * of all (key, value) pairs
3588 +     */
3589 +    public double reduceToDouble(ObjectByObjectToDouble<? super K, ? super V> transformer,
3590 +                                 double basis,
3591 +                                 DoubleByDoubleToDouble reducer) {
3592 +        return ForkJoinTasks.reduceToDouble
3593 +            (this, transformer, basis, reducer).invoke();
3594 +    }
3595 +
3596 +    /**
3597 +     * Returns the result of accumulating the given transformation
3598 +     * of all (key, value) pairs using the given reducer to
3599 +     * combine values, and the given basis as an identity value.
3600 +     *
3601 +     * @param transformer a function returning the transformation
3602 +     * for an element
3603 +     * @param basis the identity (initial default value) for the reduction
3604 +     * @param reducer a commutative associative combining function
3605 +     * @return the result of accumulating the given transformation
3606 +     * of all (key, value) pairs
3607 +     */
3608 +    public long reduceToLong(ObjectByObjectToLong<? super K, ? super V> transformer,
3609 +                             long basis,
3610 +                             LongByLongToLong reducer) {
3611 +        return ForkJoinTasks.reduceToLong
3612 +            (this, transformer, basis, reducer).invoke();
3613 +    }
3614 +
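For illustration, a sketch of the primitive specialization reduceToLong,
summing value lengths starting from the basis 0L (names are made up):

    ConcurrentHashMapV8<String, String> m =
        new ConcurrentHashMapV8<String, String>();
    m.put("a", "xx"); m.put("b", "yyy");
    long totalChars = m.reduceToLong(
        new ConcurrentHashMapV8.ObjectByObjectToLong<String, String>() {
            public long apply(String k, String v) { return v.length(); }
        },
        0L,
        new ConcurrentHashMapV8.LongByLongToLong() {
            public long apply(long a, long b) { return a + b; }
        });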
3615 +    /**
3616 +     * Returns the result of accumulating the given transformation
3617 +     * of all (key, value) pairs using the given reducer to
3618 +     * combine values, and the given basis as an identity value.
3619 +     *
3620 +     * @param transformer a function returning the transformation
3621 +     * for an element
3622 +     * @param basis the identity (initial default value) for the reduction
3623 +     * @param reducer a commutative associative combining function
3624 +     * @return the result of accumulating the given transformation
3625 +     * of all (key, value) pairs
3626 +     */
3627 +    public int reduceToInt(ObjectByObjectToInt<? super K, ? super V> transformer,
3628 +                           int basis,
3629 +                           IntByIntToInt reducer) {
3630 +        return ForkJoinTasks.reduceToInt
3631 +            (this, transformer, basis, reducer).invoke();
3632 +    }
3633 +
3634 +    /**
3635 +     * Performs the given action for each key.
3636 +     *
3637 +     * @param action the action
3638 +     */
3639 +    public void forEachKey(Action<K> action) {
3640 +        ForkJoinTasks.forEachKey
3641 +            (this, action).invoke();
3642 +    }
3643 +
3644 +    /**
3645 +     * Performs the given action for each non-null transformation
3646 +     * of each key.
3647 +     *
3648 +     * @param transformer a function returning the transformation
3649 +     * for an element, or null if there is no transformation (in
3650 +     * which case the action is not applied).
3651 +     * @param action the action
3652 +     */
3653 +    public <U> void forEachKey(Fun<? super K, ? extends U> transformer,
3654 +                               Action<U> action) {
3655 +        ForkJoinTasks.forEachKey
3656 +            (this, transformer, action).invoke();
3657 +    }
3658 +
3659 +    /**
3660 +     * Returns a non-null result from applying the given search
3661 +     * function on each key, or null if none. Upon success,
3662 +     * further element processing is suppressed and the results of
3663 +     * any other parallel invocations of the search function are
3664 +     * ignored.
3665 +     *
3666 +     * @param searchFunction a function returning a non-null
3667 +     * result on success, else null
3668 +     * @return a non-null result from applying the given search
3669 +     * function on each key, or null if none
3670 +     */
3671 +    public <U> U searchKeys(Fun<? super K, ? extends U> searchFunction) {
3672 +        return ForkJoinTasks.searchKeys
3673 +            (this, searchFunction).invoke();
3674 +    }
3675 +
3676 +    /**
3677 +     * Returns the result of accumulating all keys using the given
3678 +     * reducer to combine values, or null if none.
3679 +     *
3680 +     * @param reducer a commutative associative combining function
3681 +     * @return the result of accumulating all keys using the given
3682 +     * reducer to combine values, or null if none
3683 +     */
3684 +    public K reduceKeys(BiFun<? super K, ? super K, ? extends K> reducer) {
3685 +        return ForkJoinTasks.reduceKeys
3686 +            (this, reducer).invoke();
3687 +    }
3688 +
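A hedged sketch of reduceKeys picking the lexicographically smallest key (min
is commutative and associative; null is returned for an empty map):

    ConcurrentHashMapV8<String, Integer> m =
        new ConcurrentHashMapV8<String, Integer>();
    m.put("pear", 1); m.put("fig", 2);
    String minKey = m.reduceKeys(
        new ConcurrentHashMapV8.BiFun<String, String, String>() {
            public String apply(String a, String b) {
                return (a.compareTo(b) <= 0) ? a : b;
            }
        });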
3689 +    /**
3690 +     * Returns the result of accumulating the given transformation
3691 +     * of all keys using the given reducer to combine values, or
3692 +     * null if none.
3693 +     *
3694 +     * @param transformer a function returning the transformation
3695 +     * for an element, or null if there is no transformation (in
3696 +     * which case it is not combined).
3697 +     * @param reducer a commutative associative combining function
3698 +     * @return the result of accumulating the given transformation
3699 +     * of all keys
3700 +     */
3701 +    public <U> U reduceKeys(Fun<? super K, ? extends U> transformer,
3702 +                            BiFun<? super U, ? super U, ? extends U> reducer) {
3703 +        return ForkJoinTasks.reduceKeys
3704 +            (this, transformer, reducer).invoke();
3705 +    }
3706 +
3707 +    /**
3708 +     * Returns the result of accumulating the given transformation
3709 +     * of all keys using the given reducer to combine values, and
3710 +     * the given basis as an identity value.
3711 +     *
3712 +     * @param transformer a function returning the transformation
3713 +     * for an element
3714 +     * @param basis the identity (initial default value) for the reduction
3715 +     * @param reducer a commutative associative combining function
3716 +     * @return  the result of accumulating the given transformation
3717 +     * of all keys
3718 +     */
3719 +    public double reduceKeysToDouble(ObjectToDouble<? super K> transformer,
3720 +                                     double basis,
3721 +                                     DoubleByDoubleToDouble reducer) {
3722 +        return ForkJoinTasks.reduceKeysToDouble
3723 +            (this, transformer, basis, reducer).invoke();
3724 +    }
3725 +
3726 +    /**
3727 +     * Returns the result of accumulating the given transformation
3728 +     * of all keys using the given reducer to combine values, and
3729 +     * the given basis as an identity value.
3730 +     *
3731 +     * @param transformer a function returning the transformation
3732 +     * for an element
3733 +     * @param basis the identity (initial default value) for the reduction
3734 +     * @param reducer a commutative associative combining function
3735 +     * @return the result of accumulating the given transformation
3736 +     * of all keys
3737 +     */
3738 +    public long reduceKeysToLong(ObjectToLong<? super K> transformer,
3739 +                                 long basis,
3740 +                                 LongByLongToLong reducer) {
3741 +        return ForkJoinTasks.reduceKeysToLong
3742 +            (this, transformer, basis, reducer).invoke();
3743 +    }
3744 +
3745 +    /**
3746 +     * Returns the result of accumulating the given transformation
3747 +     * of all keys using the given reducer to combine values, and
3748 +     * the given basis as an identity value.
3749 +     *
3750 +     * @param transformer a function returning the transformation
3751 +     * for an element
3752 +     * @param basis the identity (initial default value) for the reduction
3753 +     * @param reducer a commutative associative combining function
3754 +     * @return the result of accumulating the given transformation
3755 +     * of all keys
3756 +     */
3757 +    public int reduceKeysToInt(ObjectToInt<? super K> transformer,
3758 +                               int basis,
3759 +                               IntByIntToInt reducer) {
3760 +        return ForkJoinTasks.reduceKeysToInt
3761 +            (this, transformer, basis, reducer).invoke();
3762 +    }
3763 +
3764 +    /**
3765 +     * Performs the given action for each value.
3766 +     *
3767 +     * @param action the action
3768 +     */
3769 +    public void forEachValue(Action<V> action) {
3770 +        ForkJoinTasks.forEachValue
3771 +            (this, action).invoke();
3772 +    }
3773 +
3774 +    /**
3775 +     * Performs the given action for each non-null transformation
3776 +     * of each value.
3777 +     *
3778 +     * @param transformer a function returning the transformation
3779 +     * for an element, or null if there is no transformation (in
3780 +     * which case the action is not applied).
3781 +     */
3782 +    public <U> void forEachValue(Fun<? super V, ? extends U> transformer,
3783 +                                 Action<U> action) {
3784 +        ForkJoinTasks.forEachValue
3785 +            (this, transformer, action).invoke();
3786 +    }
3787 +
3788 +    /**
3789 +     * Returns a non-null result from applying the given search
3790 +     * function on each value, or null if none.  Upon success,
3791 +     * further element processing is suppressed and the results of
3792 +     * any other parallel invocations of the search function are
3793 +     * ignored.
3794 +     *
3795 +     * @param searchFunction a function returning a non-null
3796 +     * result on success, else null
3797 +     * @return a non-null result from applying the given search
3798 +     * function on each value, or null if none
3799 +     *
3800 +     */
3801 +    public <U> U searchValues(Fun<? super V, ? extends U> searchFunction) {
3802 +        return ForkJoinTasks.searchValues
3803 +            (this, searchFunction).invoke();
3804 +    }
3805 +
3806 +    /**
3807 +     * Returns the result of accumulating all values using the
3808 +     * given reducer to combine values, or null if none.
3809 +     *
3810 +     * @param reducer a commutative associative combining function
3811 +     * @return  the result of accumulating all values
3812 +     */
3813 +    public V reduceValues(BiFun<? super V, ? super V, ? extends V> reducer) {
3814 +        return ForkJoinTasks.reduceValues
3815 +            (this, reducer).invoke();
3816 +    }
3817 +
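For illustration, a sketch of reduceValues computing the largest value in the
map, or null if the map is empty (names are made up):

    ConcurrentHashMapV8<String, Integer> m =
        new ConcurrentHashMapV8<String, Integer>();
    m.put("a", 7); m.put("b", 3);
    Integer max = m.reduceValues(
        new ConcurrentHashMapV8.BiFun<Integer, Integer, Integer>() {
            public Integer apply(Integer a, Integer b) {
                return (a >= b) ? a : b;
            }
        });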
3818 +    /**
3819 +     * Returns the result of accumulating the given transformation
3820 +     * of all values using the given reducer to combine values, or
3821 +     * null if none.
3822 +     *
3823 +     * @param transformer a function returning the transformation
3824 +     * for an element, or null if there is no transformation (in
3825 +     * which case it is not combined).
3826 +     * @param reducer a commutative associative combining function
3827 +     * @return the result of accumulating the given transformation
3828 +     * of all values
3829 +     */
3830 +    public <U> U reduceValues(Fun<? super V, ? extends U> transformer,
3831 +                              BiFun<? super U, ? super U, ? extends U> reducer) {
3832 +        return ForkJoinTasks.reduceValues
3833 +            (this, transformer, reducer).invoke();
3834 +    }
3835 +
3836 +    /**
3837 +     * Returns the result of accumulating the given transformation
3838 +     * of all values using the given reducer to combine values,
3839 +     * and the given basis as an identity value.
3840 +     *
3841 +     * @param transformer a function returning the transformation
3842 +     * for an element
3843 +     * @param basis the identity (initial default value) for the reduction
3844 +     * @param reducer a commutative associative combining function
3845 +     * @return the result of accumulating the given transformation
3846 +     * of all values
3847 +     */
3848 +    public double reduceValuesToDouble(ObjectToDouble<? super V> transformer,
3849 +                                       double basis,
3850 +                                       DoubleByDoubleToDouble reducer) {
3851 +        return ForkJoinTasks.reduceValuesToDouble
3852 +            (this, transformer, basis, reducer).invoke();
3853 +    }
3854 +
3855 +    /**
3856 +     * Returns the result of accumulating the given transformation
3857 +     * of all values using the given reducer to combine values,
3858 +     * and the given basis as an identity value.
3859 +     *
3860 +     * @param transformer a function returning the transformation
3861 +     * for an element
3862 +     * @param basis the identity (initial default value) for the reduction
3863 +     * @param reducer a commutative associative combining function
3864 +     * @return the result of accumulating the given transformation
3865 +     * of all values
3866 +     */
3867 +    public long reduceValuesToLong(ObjectToLong<? super V> transformer,
3868 +                                   long basis,
3869 +                                   LongByLongToLong reducer) {
3870 +        return ForkJoinTasks.reduceValuesToLong
3871 +            (this, transformer, basis, reducer).invoke();
3872 +    }
3873 +
3874 +    /**
3875 +     * Returns the result of accumulating the given transformation
3876 +     * of all values using the given reducer to combine values,
3877 +     * and the given basis as an identity value.
3878 +     *
3879 +     * @param transformer a function returning the transformation
3880 +     * for an element
3881 +     * @param basis the identity (initial default value) for the reduction
3882 +     * @param reducer a commutative associative combining function
3883 +     * @return the result of accumulating the given transformation
3884 +     * of all values
3885 +     */
3886 +    public int reduceValuesToInt(ObjectToInt<? super V> transformer,
3887 +                                 int basis,
3888 +                                 IntByIntToInt reducer) {
3889 +        return ForkJoinTasks.reduceValuesToInt
3890 +            (this, transformer, basis, reducer).invoke();
3891 +    }
3892 +
3893 +    /**
3894 +     * Performs the given action for each entry.
3895 +     *
3896 +     * @param action the action
3897 +     */
3898 +    public void forEachEntry(Action<Map.Entry<K,V>> action) {
3899 +        ForkJoinTasks.forEachEntry
3900 +            (this, action).invoke();
3901 +    }
3902 +
3903 +    /**
3904 +     * Performs the given action for each non-null transformation
3905 +     * of each entry.
3906 +     *
3907 +     * @param transformer a function returning the transformation
3908 +     * for an element, or null if there is no transformation (in
3909 +     * which case the action is not applied).
3910 +     * @param action the action
3911 +     */
3912 +    public <U> void forEachEntry(Fun<Map.Entry<K,V>, ? extends U> transformer,
3913 +                                 Action<U> action) {
3914 +        ForkJoinTasks.forEachEntry
3915 +            (this, transformer, action).invoke();
3916 +    }
3917 +
3918 +    /**
3919 +     * Returns a non-null result from applying the given search
3920 +     * function on each entry, or null if none.  Upon success,
3921 +     * further element processing is suppressed and the results of
3922 +     * any other parallel invocations of the search function are
3923 +     * ignored.
3924 +     *
3925 +     * @param searchFunction a function returning a non-null
3926 +     * result on success, else null
3927 +     * @return a non-null result from applying the given search
3928 +     * function on each entry, or null if none
3929 +     */
3930 +    public <U> U searchEntries(Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
3931 +        return ForkJoinTasks.searchEntries
3932 +            (this, searchFunction).invoke();
3933 +    }
3934 +
3935 +    /**
3936 +     * Returns the result of accumulating all entries using the
3937 +     * given reducer to combine values, or null if none.
3938 +     *
3939 +     * @param reducer a commutative associative combining function
3940 +     * @return the result of accumulating all entries
3941 +     */
3942 +    public Map.Entry<K,V> reduceEntries(BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3943 +        return ForkJoinTasks.reduceEntries
3944 +            (this, reducer).invoke();
3945 +    }
3946 +
3947 +    /**
3948 +     * Returns the result of accumulating the given transformation
3949 +     * of all entries using the given reducer to combine values,
3950 +     * or null if none.
3951 +     *
3952 +     * @param transformer a function returning the transformation
3953 +     * for an element, or null if there is no transformation (in
3954 +     * which case it is not combined).
3955 +     * @param reducer a commutative associative combining function
3956 +     * @return the result of accumulating the given transformation
3957 +     * of all entries
3958 +     */
3959 +    public <U> U reduceEntries(Fun<Map.Entry<K,V>, ? extends U> transformer,
3960 +                               BiFun<? super U, ? super U, ? extends U> reducer) {
3961 +        return ForkJoinTasks.reduceEntries
3962 +            (this, transformer, reducer).invoke();
3963 +    }
3964 +
3965 +    /**
3966 +     * Returns the result of accumulating the given transformation
3967 +     * of all entries using the given reducer to combine values,
3968 +     * and the given basis as an identity value.
3969 +     *
3970 +     * @param transformer a function returning the transformation
3971 +     * for an element
3972 +     * @param basis the identity (initial default value) for the reduction
3973 +     * @param reducer a commutative associative combining function
3974 +     * @return the result of accumulating the given transformation
3975 +     * of all entries
3976 +     */
3977 +    public double reduceEntriesToDouble(ObjectToDouble<Map.Entry<K,V>> transformer,
3978 +                                        double basis,
3979 +                                        DoubleByDoubleToDouble reducer) {
3980 +        return ForkJoinTasks.reduceEntriesToDouble
3981 +            (this, transformer, basis, reducer).invoke();
3982 +    }
3983 +
3984 +    /**
3985 +     * Returns the result of accumulating the given transformation
3986 +     * of all entries using the given reducer to combine values,
3987 +     * and the given basis as an identity value.
3988 +     *
3989 +     * @param transformer a function returning the transformation
3990 +     * for an element
3991 +     * @param basis the identity (initial default value) for the reduction
3992 +     * @param reducer a commutative associative combining function
3993 +     * @return  the result of accumulating the given transformation
3994 +     * of all entries
3995 +     */
3996 +    public long reduceEntriesToLong(ObjectToLong<Map.Entry<K,V>> transformer,
3997 +                                    long basis,
3998 +                                    LongByLongToLong reducer) {
3999 +        return ForkJoinTasks.reduceEntriesToLong
4000 +            (this, transformer, basis, reducer).invoke();
4001 +    }
4002 +
4003 +    /**
4004 +     * Returns the result of accumulating the given transformation
4005 +     * of all entries using the given reducer to combine values,
4006 +     * and the given basis as an identity value.
4007 +     *
4008 +     * @param transformer a function returning the transformation
4009 +     * for an element
4010 +     * @param basis the identity (initial default value) for the reduction
4011 +     * @param reducer a commutative associative combining function
4012 +     * @return the result of accumulating the given transformation
4013 +     * of all entries
4014 +     */
4015 +    public int reduceEntriesToInt(ObjectToInt<Map.Entry<K,V>> transformer,
4016 +                                  int basis,
4017 +                                  IntByIntToInt reducer) {
4018 +        return ForkJoinTasks.reduceEntriesToInt
4019 +            (this, transformer, basis, reducer).invoke();
4020 +    }
4021 +
4022 +    /* ----------------Views -------------- */
4023 +
4024 +    /**
4025 +     * Base class for views.
4026 +     */
4027 +    static abstract class CHMView<K, V> {
4028 +        final ConcurrentHashMapV8<K, V> map;
4029 +        CHMView(ConcurrentHashMapV8<K, V> map)  { this.map = map; }
4030 +
4031 +        /**
4032 +         * Returns the map backing this view.
4033 +         *
4034 +         * @return the map backing this view
4035 +         */
4036 +        public ConcurrentHashMapV8<K,V> getMap() { return map; }
4037 +
4038 +        public final int size()                 { return map.size(); }
4039 +        public final boolean isEmpty()          { return map.isEmpty(); }
4040 +        public final void clear()               { map.clear(); }
4041 +
4042 +        // implementations below rely on concrete classes supplying these
4043 +        abstract public Iterator<?> iterator();
4044 +        abstract public boolean contains(Object o);
4045 +        abstract public boolean remove(Object o);
4046 +
4047 +        private static final String oomeMsg = "Required array size too large";
4048 +
4049 +        public final Object[] toArray() {
4050 +            long sz = map.mappingCount();
4051 +            if (sz > (long)(MAX_ARRAY_SIZE))
4052 +                throw new OutOfMemoryError(oomeMsg);
4053 +            int n = (int)sz;
4054 +            Object[] r = new Object[n];
4055 +            int i = 0;
4056 +            Iterator<?> it = iterator();
4057 +            while (it.hasNext()) {
4058 +                if (i == n) {
4059 +                    if (n >= MAX_ARRAY_SIZE)
4060 +                        throw new OutOfMemoryError(oomeMsg);
4061 +                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4062 +                        n = MAX_ARRAY_SIZE;
4063 +                    else
4064 +                        n += (n >>> 1) + 1;
4065 +                    r = Arrays.copyOf(r, n);
4066 +                }
4067 +                r[i++] = it.next();
4068 +            }
4069 +            return (i == n) ? r : Arrays.copyOf(r, i);
4070 +        }
4071 +
4072 +        @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
4073 +            long sz = map.mappingCount();
4074 +            if (sz > (long)(MAX_ARRAY_SIZE))
4075 +                throw new OutOfMemoryError(oomeMsg);
4076 +            int m = (int)sz;
4077 +            T[] r = (a.length >= m) ? a :
4078 +                (T[])java.lang.reflect.Array
4079 +                .newInstance(a.getClass().getComponentType(), m);
4080 +            int n = r.length;
4081 +            int i = 0;
4082 +            Iterator<?> it = iterator();
4083 +            while (it.hasNext()) {
4084 +                if (i == n) {
4085 +                    if (n >= MAX_ARRAY_SIZE)
4086 +                        throw new OutOfMemoryError(oomeMsg);
4087 +                    if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4088 +                        n = MAX_ARRAY_SIZE;
4089 +                    else
4090 +                        n += (n >>> 1) + 1;
4091 +                    r = Arrays.copyOf(r, n);
4092 +                }
4093 +                r[i++] = (T)it.next();
4094 +            }
4095 +            if (a == r && i < n) {
4096 +                r[i] = null; // null-terminate
4097 +                return r;
4098 +            }
4099 +            return (i == n) ? r : Arrays.copyOf(r, i);
4100 +        }
4101 +
4102 +        public final int hashCode() {
4103 +            int h = 0;
4104 +            for (Iterator<?> it = iterator(); it.hasNext();)
4105 +                h += it.next().hashCode();
4106 +            return h;
4107 +        }
4108 +
4109 +        public final String toString() {
4110 +            StringBuilder sb = new StringBuilder();
4111 +            sb.append('[');
4112 +            Iterator<?> it = iterator();
4113 +            if (it.hasNext()) {
4114 +                for (;;) {
4115 +                    Object e = it.next();
4116 +                    sb.append(e == this ? "(this Collection)" : e);
4117 +                    if (!it.hasNext())
4118 +                        break;
4119 +                    sb.append(',').append(' ');
4120 +                }
4121 +            }
4122 +            return sb.append(']').toString();
4123 +        }
4124 +
4125 +        public final boolean containsAll(Collection<?> c) {
4126 +            if (c != this) {
4127 +                for (Iterator<?> it = c.iterator(); it.hasNext();) {
4128 +                    Object e = it.next();
4129 +                    if (e == null || !contains(e))
4130 +                        return false;
4131 +                }
4132 +            }
4133 +            return true;
4134 +        }
4135 +
4136 +        public final boolean removeAll(Collection<?> c) {
4137 +            boolean modified = false;
4138 +            for (Iterator<?> it = iterator(); it.hasNext();) {
4139 +                if (c.contains(it.next())) {
4140 +                    it.remove();
4141 +                    modified = true;
4142 +                }
4143 +            }
4144 +            return modified;
4145 +        }
4146 +
4147 +        public final boolean retainAll(Collection<?> c) {
4148 +            boolean modified = false;
4149 +            for (Iterator<?> it = iterator(); it.hasNext();) {
4150 +                if (!c.contains(it.next())) {
4151 +                    it.remove();
4152 +                    modified = true;
4153 +                }
4154 +            }
4155 +            return modified;
4156 +        }
4157 +
4158 +    }
4159 +
4160 +    /**
4161 +     * A view of a ConcurrentHashMapV8 as a {@link Set} of keys, in
4162 +     * which additions may optionally be enabled by mapping to a
4163 +     * common value.  This class cannot be directly instantiated. See
4164 +     * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
4165 +     * {@link #newKeySet(int)}.
4166 +     */
4167 +    public static class KeySetView<K,V> extends CHMView<K,V> implements Set<K>, java.io.Serializable {
4168 +        private static final long serialVersionUID = 7249069246763182397L;
4169 +        private final V value;
4170 +        KeySetView(ConcurrentHashMapV8<K, V> map, V value) {  // non-public
4171 +            super(map);
4172 +            this.value = value;
4173 +        }
4174 +
4175 +        /**
4176 +         * Returns the default mapped value for additions,
4177 +         * or {@code null} if additions are not supported.
4178 +         *
4179 +         * @return the default mapped value for additions, or {@code null}
4180 +         * if not supported.
4181 +         */
4182 +        public V getMappedValue() { return value; }
4183 +
4184 +        // implement Set API
4185 +
4186 +        public boolean contains(Object o) { return map.containsKey(o); }
4187 +        public boolean remove(Object o)   { return map.remove(o) != null; }
4188 +
4189 +        /**
4190 +         * Returns a "weakly consistent" iterator that will never
4191 +         * throw {@link ConcurrentModificationException}, and
4192 +         * guarantees to traverse elements as they existed upon
4193 +         * construction of the iterator, and may (but is not
4194 +         * guaranteed to) reflect any modifications subsequent to
4195 +         * construction.
4196 +         *
4197 +         * @return an iterator over the keys of this map
4198 +         */
4199 +        public Iterator<K> iterator()     { return new KeyIterator<K,V>(map); }
4200 +        public boolean add(K e) {
4201 +            V v;
4202 +            if ((v = value) == null)
4203 +                throw new UnsupportedOperationException();
4204 +            if (e == null)
4205 +                throw new NullPointerException();
4206 +            return map.internalPutIfAbsent(e, v) == null;
4207 +        }
4208 +        public boolean addAll(Collection<? extends K> c) {
4209 +            boolean added = false;
4210 +            V v;
4211 +            if ((v = value) == null)
4212 +                throw new UnsupportedOperationException();
4213 +            for (K e : c) {
4214 +                if (e == null)
4215 +                    throw new NullPointerException();
4216 +                if (map.internalPutIfAbsent(e, v) == null)
4217 +                    added = true;
4218 +            }
4219 +            return added;
4220 +        }
4221 +        public boolean equals(Object o) {
4222 +            Set<?> c;
4223 +            return ((o instanceof Set) &&
4224 +                    ((c = (Set<?>)o) == this ||
4225 +                     (containsAll(c) && c.containsAll(this))));
4226 +        }
4227 +
4228 +        /**
4229 +         * Performs the given action for each key.
4230 +         *
4231 +         * @param action the action
4232 +         */
4233 +        public void forEach(Action<K> action) {
4234 +            ForkJoinTasks.forEachKey
4235 +                (map, action).invoke();
4236 +        }
4237 +
4238 +        /**
4239 +         * Performs the given action for each non-null transformation
4240 +         * of each key.
4241 +         *
4242 +         * @param transformer a function returning the transformation
4243 +         * for an element, or null if there is no transformation (in
4244 +         * which case the action is not applied).
4245 +         * @param action the action
4246 +         */
4247 +        public <U> void forEach(Fun<? super K, ? extends U> transformer,
4248 +                                Action<U> action) {
4249 +            ForkJoinTasks.forEachKey
4250 +                (map, transformer, action).invoke();
4251 +        }
4252 +
4253 +        /**
4254 +         * Returns a non-null result from applying the given search
4255 +         * function on each key, or null if none. Upon success,
4256 +         * further element processing is suppressed and the results of
4257 +         * any other parallel invocations of the search function are
4258 +         * ignored.
4259 +         *
4260 +         * @param searchFunction a function returning a non-null
4261 +         * result on success, else null
4262 +         * @return a non-null result from applying the given search
4263 +         * function on each key, or null if none
4264 +         */
4265 +        public <U> U search(Fun<? super K, ? extends U> searchFunction) {
4266 +            return ForkJoinTasks.searchKeys
4267 +                (map, searchFunction).invoke();
4268 +        }
4269 +
4270 +        /**
4271 +         * Returns the result of accumulating all keys using the given
4272 +         * reducer to combine values, or null if none.
4273 +         *
4274 +         * @param reducer a commutative associative combining function
4275 +         * @return the result of accumulating all keys using the given
4276 +         * reducer to combine values, or null if none
4277 +         */
4278 +        public K reduce(BiFun<? super K, ? super K, ? extends K> reducer) {
4279 +            return ForkJoinTasks.reduceKeys
4280 +                (map, reducer).invoke();
4281 +        }
4282 +
4283 +        /**
4284 +         * Returns the result of accumulating the given transformation
4285 +         * of all keys using the given reducer to combine values, and
4286 +         * the given basis as an identity value.
4287 +         *
4288 +         * @param transformer a function returning the transformation
4289 +         * for an element
4290 +         * @param basis the identity (initial default value) for the reduction
4291 +         * @param reducer a commutative associative combining function
4292 +         * @return  the result of accumulating the given transformation
4293 +         * of all keys
4294 +         */
4295 +        public double reduceToDouble(ObjectToDouble<? super K> transformer,
4296 +                                     double basis,
4297 +                                     DoubleByDoubleToDouble reducer) {
4298 +            return ForkJoinTasks.reduceKeysToDouble
4299 +                (map, transformer, basis, reducer).invoke();
4300 +        }
4301 +
4302 +
4303 +        /**
4304 +         * Returns the result of accumulating the given transformation
4305 +         * of all keys using the given reducer to combine values, and
4306 +         * the given basis as an identity value.
4307 +         *
4308 +         * @param transformer a function returning the transformation
4309 +         * for an element
4310 +         * @param basis the identity (initial default value) for the reduction
4311 +         * @param reducer a commutative associative combining function
4312 +         * @return the result of accumulating the given transformation
4313 +         * of all keys
4314 +         */
4315 +        public long reduceToLong(ObjectToLong<? super K> transformer,
4316 +                                 long basis,
4317 +                                 LongByLongToLong reducer) {
4318 +            return ForkJoinTasks.reduceKeysToLong
4319 +                (map, transformer, basis, reducer).invoke();
4320 +        }
4321 +
4322 +        /**
4323 +         * Returns the result of accumulating the given transformation
4324 +         * of all keys using the given reducer to combine values, and
4325 +         * the given basis as an identity value.
4326 +         *
4327 +         * @param transformer a function returning the transformation
4328 +         * for an element
4329 +         * @param basis the identity (initial default value) for the reduction
4330 +         * @param reducer a commutative associative combining function
4331 +         * @return the result of accumulating the given transformation
4332 +         * of all keys
4333 +         */
4334 +        public int reduceToInt(ObjectToInt<? super K> transformer,
4335 +                               int basis,
4336 +                               IntByIntToInt reducer) {
4337 +            return ForkJoinTasks.reduceKeysToInt
4338 +                (map, transformer, basis, reducer).invoke();
4339 +        }
4340 +
4341 +    }
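
A hedged usage sketch of the key-set view above. It assumes the factory methods named in the class javadoc (newKeySet() and keySet(Object)) and that newKeySet() backs the set with a placeholder Boolean value, as the later java.util.concurrent version does; key strings and value types are illustrative only.

import java.util.Arrays;
import jsr166e.ConcurrentHashMapV8;
import jsr166e.ConcurrentHashMapV8.KeySetView;

public class KeySetViewDemo {
    public static void main(String[] args) {
        // A concurrent Set created directly from the factory method; additions are enabled.
        KeySetView<String,Boolean> set = ConcurrentHashMapV8.newKeySet();
        set.add("alpha");
        set.addAll(Arrays.asList("beta", "gamma"));

        // A key view over an existing map; add(e) maps e to the given default value (here 0),
        // behaving like a putIfAbsent of that value.
        ConcurrentHashMapV8<String,Integer> map = new ConcurrentHashMapV8<String,Integer>();
        KeySetView<String,Integer> keys = map.keySet(0);
        keys.add("delta");

        System.out.println(set.contains("beta")); // true
        System.out.println(map.get("delta"));     // 0
    }
}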
4342 +
4343 +    /**
4344 +     * A view of a ConcurrentHashMapV8 as a {@link Collection} of
4345 +     * values, in which additions are disabled. This class cannot be
4346 +     * directly instantiated. See {@link #values},
4347 +     * directly instantiated. See {@link #values}.
4348 +     * <p>The view's {@code iterator} is a "weakly consistent" iterator
4349 +     * that will never throw {@link ConcurrentModificationException},
4350 +     * and guarantees to traverse elements as they existed upon
4351 +     * construction of the iterator, and may (but is not guaranteed to)
4352 +     * reflect any modifications subsequent to construction.
4353 +     */
4354 +    public static final class ValuesView<K,V> extends CHMView<K,V>
4355 +        implements Collection<V> {
4356 +        ValuesView(ConcurrentHashMapV8<K, V> map)   { super(map); }
4357 +        public final boolean contains(Object o) { return map.containsValue(o); }
4358 +        public final boolean remove(Object o) {
4359 +            if (o != null) {
4360 +                Iterator<V> it = new ValueIterator<K,V>(map);
4361 +                while (it.hasNext()) {
4362 +                    if (o.equals(it.next())) {
4363 +                        it.remove();
4364 +                        return true;
4365 +                    }
4366 +                }
4367 +            }
4368 +            return false;
4369 +        }
4370 +
4371 +        /**
4372 +         * Returns a "weakly consistent" iterator that will never
4373 +         * throw {@link ConcurrentModificationException}, and
4374 +         * guarantees to traverse elements as they existed upon
4375 +         * construction of the iterator, and may (but is not
4376 +         * guaranteed to) reflect any modifications subsequent to
4377 +         * construction.
4378 +         *
4379 +         * @return an iterator over the values of this map
4380 +         */
4381 +        public final Iterator<V> iterator() {
4382 +            return new ValueIterator<K,V>(map);
4383 +        }
4384 +        public final boolean add(V e) {
4385 +            throw new UnsupportedOperationException();
4386 +        }
4387 +        public final boolean addAll(Collection<? extends V> c) {
4388 +            throw new UnsupportedOperationException();
4389 +        }
4390 +
4391 +        /**
4392 +         * Performs the given action for each value.
4393 +         *
4394 +         * @param action the action
4395 +         */
4396 +        public void forEach(Action<V> action) {
4397 +            ForkJoinTasks.forEachValue
4398 +                (map, action).invoke();
4399 +        }
4400 +
4401 +        /**
4402 +         * Performs the given action for each non-null transformation
4403 +         * of each value.
4404 +         *
4405 +         * @param transformer a function returning the transformation
4406 +         * for an element, or null if there is no transformation (in
4407 +         * which case the action is not applied).
4408 +         */
4409 +        public <U> void forEach(Fun<? super V, ? extends U> transformer,
4410 +                                     Action<U> action) {
4411 +            ForkJoinTasks.forEachValue
4412 +                (map, transformer, action).invoke();
4413 +        }
4414 +
4415 +        /**
4416 +         * Returns a non-null result from applying the given search
4417 +         * function on each value, or null if none.  Upon success,
4418 +         * further element processing is suppressed and the results of
4419 +         * any other parallel invocations of the search function are
4420 +         * ignored.
4421 +         *
4422 +         * @param searchFunction a function returning a non-null
4423 +         * result on success, else null
4424 +         * @return a non-null result from applying the given search
4425 +         * function on each value, or null if none
4426 +         *
4427 +         */
4428 +        public <U> U search(Fun<? super V, ? extends U> searchFunction) {
4429 +            return ForkJoinTasks.searchValues
4430 +                (map, searchFunction).invoke();
4431 +        }
4432 +
4433 +        /**
4434 +         * Returns the result of accumulating all values using the
4435 +         * given reducer to combine values, or null if none.
4436 +         *
4437 +         * @param reducer a commutative associative combining function
4438 +         * @return  the result of accumulating all values
4439 +         */
4440 +        public V reduce(BiFun<? super V, ? super V, ? extends V> reducer) {
4441 +            return ForkJoinTasks.reduceValues
4442 +                (map, reducer).invoke();
4443 +        }
4444 +
4445 +        /**
4446 +         * Returns the result of accumulating the given transformation
4447 +         * of all values using the given reducer to combine values, or
4448 +         * null if none.
4449 +         *
4450 +         * @param transformer a function returning the transformation
4451 +         * for an element, or null if there is no transformation (in
4452 +         * which case it is not combined).
4453 +         * @param reducer a commutative associative combining function
4454 +         * @return the result of accumulating the given transformation
4455 +         * of all values
4456 +         */
4457 +        public <U> U reduce(Fun<? super V, ? extends U> transformer,
4458 +                            BiFun<? super U, ? super U, ? extends U> reducer) {
4459 +            return ForkJoinTasks.reduceValues
4460 +                (map, transformer, reducer).invoke();
4461 +        }
4462 +
4463 +        /**
4464 +         * Returns the result of accumulating the given transformation
4465 +         * of all values using the given reducer to combine values,
4466 +         * and the given basis as an identity value.
4467 +         *
4468 +         * @param transformer a function returning the transformation
4469 +         * for an element
4470 +         * @param basis the identity (initial default value) for the reduction
4471 +         * @param reducer a commutative associative combining function
4472 +         * @return the result of accumulating the given transformation
4473 +         * of all values
4474 +         */
4475 +        public double reduceToDouble(ObjectToDouble<? super V> transformer,
4476 +                                     double basis,
4477 +                                     DoubleByDoubleToDouble reducer) {
4478 +            return ForkJoinTasks.reduceValuesToDouble
4479 +                (map, transformer, basis, reducer).invoke();
4480 +        }
4481 +
4482 +        /**
4483 +         * Returns the result of accumulating the given transformation
4484 +         * of all values using the given reducer to combine values,
4485 +         * and the given basis as an identity value.
4486 +         *
4487 +         * @param transformer a function returning the transformation
4488 +         * for an element
4489 +         * @param basis the identity (initial default value) for the reduction
4490 +         * @param reducer a commutative associative combining function
4491 +         * @return the result of accumulating the given transformation
4492 +         * of all values
4493 +         */
4494 +        public long reduceToLong(ObjectToLong<? super V> transformer,
4495 +                                 long basis,
4496 +                                 LongByLongToLong reducer) {
4497 +            return ForkJoinTasks.reduceValuesToLong
4498 +                (map, transformer, basis, reducer).invoke();
4499 +        }
4500 +
4501 +        /**
4502 +         * Returns the result of accumulating the given transformation
4503 +         * of all values using the given reducer to combine values,
4504 +         * and the given basis as an identity value.
4505 +         *
4506 +         * @param transformer a function returning the transformation
4507 +         * for an element
4508 +         * @param basis the identity (initial default value) for the reduction
4509 +         * @param reducer a commutative associative combining function
4510 +         * @return the result of accumulating the given transformation
4511 +         * of all values
4512 +         */
4513 +        public int reduceToInt(ObjectToInt<? super V> transformer,
4514 +                               int basis,
4515 +                               IntByIntToInt reducer) {
4516 +            return ForkJoinTasks.reduceValuesToInt
4517 +                (map, transformer, basis, reducer).invoke();
4518 +        }
4519 +
4520 +    }
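
A hedged sketch of the value-view bulk operations above, summing all values in parallel with reduceToLong. It assumes that values() returns the ValuesView defined here (as the class javadoc's {@link #values} reference suggests) and that the ObjectToLong and LongByLongToLong callback interfaces declared elsewhere in this class each expose a single apply method; the map contents are illustrative.

import jsr166e.ConcurrentHashMapV8;
import jsr166e.ConcurrentHashMapV8.ValuesView;

public class ValuesViewDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String,Integer> map = new ConcurrentHashMapV8<String,Integer>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        ValuesView<String,Integer> values = map.values();

        // Parallel sum: widen each value to long, then combine with addition,
        // starting from the identity value 0L.
        long sum = values.reduceToLong(
            new ConcurrentHashMapV8.ObjectToLong<Integer>() {
                public long apply(Integer v) { return v.longValue(); }
            },
            0L,
            new ConcurrentHashMapV8.LongByLongToLong() {
                public long apply(long a, long b) { return a + b; }
            });
        System.out.println(sum); // 6
    }
}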
4521 +
4522 +    /**
4523 +     * A view of a ConcurrentHashMapV8 as a {@link Set} of (key, value)
4524 +     * entries.  This class cannot be directly instantiated. See
4525 +     * {@link #entrySet}.
4526 +     */
4527 +    public static final class EntrySetView<K,V> extends CHMView<K,V>
4528 +        implements Set<Map.Entry<K,V>> {
4529 +        EntrySetView(ConcurrentHashMapV8<K, V> map) { super(map); }
4530 +        public final boolean contains(Object o) {
4531 +            Object k, v, r; Map.Entry<?,?> e;
4532 +            return ((o instanceof Map.Entry) &&
4533 +                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4534 +                    (r = map.get(k)) != null &&
4535 +                    (v = e.getValue()) != null &&
4536 +                    (v == r || v.equals(r)));
4537 +        }
4538 +        public final boolean remove(Object o) {
4539 +            Object k, v; Map.Entry<?,?> e;
4540 +            return ((o instanceof Map.Entry) &&
4541 +                    (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4542 +                    (v = e.getValue()) != null &&
4543 +                    map.remove(k, v));
4544 +        }
4545 +
4546 +        /**
4547 +         * Returns a "weakly consistent" iterator that will never
4548 +         * throw {@link ConcurrentModificationException}, and
4549 +         * guarantees to traverse elements as they existed upon
4550 +         * construction of the iterator, and may (but is not
4551 +         * guaranteed to) reflect any modifications subsequent to
4552 +         * construction.
4553 +         *
4554 +         * @return an iterator over the entries of this map
4555 +         */
4556 +        public final Iterator<Map.Entry<K,V>> iterator() {
4557 +            return new EntryIterator<K,V>(map);
4558 +        }
4559 +
4560 +        public final boolean add(Entry<K,V> e) {
4561 +            K key = e.getKey();
4562 +            V value = e.getValue();
4563 +            if (key == null || value == null)
4564 +                throw new NullPointerException();
4565 +            return map.internalPut(key, value) == null;
4566 +        }
4567 +        public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4568 +            boolean added = false;
4569 +            for (Entry<K,V> e : c) {
4570 +                if (add(e))
4571 +                    added = true;
4572 +            }
4573 +            return added;
4574 +        }
4575 +        public boolean equals(Object o) {
4576 +            Set<?> c;
4577 +            return ((o instanceof Set) &&
4578 +                    ((c = (Set<?>)o) == this ||
4579 +                     (containsAll(c) && c.containsAll(this))));
4580 +        }
4581 +
4582 +        /**
4583 +         * Performs the given action for each entry.
4584 +         *
4585 +         * @param action the action
4586 +         */
4587 +        public void forEach(Action<Map.Entry<K,V>> action) {
4588 +            ForkJoinTasks.forEachEntry
4589 +                (map, action).invoke();
4590 +        }
4591 +
4592 +        /**
4593 +         * Performs the given action for each non-null transformation
4594 +         * of each entry.
4595 +         *
4596 +         * @param transformer a function returning the transformation
4597 +         * for an element, or null if there is no transformation (in
4598 +         * which case the action is not applied).
4599 +         * @param action the action
4600 +         */
4601 +        public <U> void forEach(Fun<Map.Entry<K,V>, ? extends U> transformer,
4602 +                                Action<U> action) {
4603 +            ForkJoinTasks.forEachEntry
4604 +                (map, transformer, action).invoke();
4605 +        }
4606 +
4607 +        /**
4608 +         * Returns a non-null result from applying the given search
4609 +         * function on each entry, or null if none.  Upon success,
4610 +         * further element processing is suppressed and the results of
4611 +         * any other parallel invocations of the search function are
4612 +         * ignored.
4613 +         *
4614 +         * @param searchFunction a function returning a non-null
4615 +         * result on success, else null
4616 +         * @return a non-null result from applying the given search
4617 +         * function on each entry, or null if none
4618 +         */
4619 +        public <U> U search(Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
4620 +            return ForkJoinTasks.searchEntries
4621 +                (map, searchFunction).invoke();
4622 +        }
4623 +
4624 +        /**
4625 +         * Returns the result of accumulating all entries using the
4626 +         * given reducer to combine values, or null if none.
4627 +         *
4628 +         * @param reducer a commutative associative combining function
4629 +         * @return the result of accumulating all entries
4630 +         */
4631 +        public Map.Entry<K,V> reduce(BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4632 +            return ForkJoinTasks.reduceEntries
4633 +                (map, reducer).invoke();
4634 +        }
4635 +
4636 +        /**
4637 +         * Returns the result of accumulating the given transformation
4638 +         * of all entries using the given reducer to combine values,
4639 +         * or null if none.
4640 +         *
4641 +         * @param transformer a function returning the transformation
4642 +         * for an element, or null if there is no transformation (in
4643 +         * which case it is not combined).
4644 +         * @param reducer a commutative associative combining function
4645 +         * @return the result of accumulating the given transformation
4646 +         * of all entries
4647 +         */
4648 +        public <U> U reduce(Fun<Map.Entry<K,V>, ? extends U> transformer,
4649 +                            BiFun<? super U, ? super U, ? extends U> reducer) {
4650 +            return ForkJoinTasks.reduceEntries
4651 +                (map, transformer, reducer).invoke();
4652 +        }
4653 +
4654 +        /**
4655 +         * Returns the result of accumulating the given transformation
4656 +         * of all entries using the given reducer to combine values,
4657 +         * and the given basis as an identity value.
4658 +         *
4659 +         * @param transformer a function returning the transformation
4660 +         * for an element
4661 +         * @param basis the identity (initial default value) for the reduction
4662 +         * @param reducer a commutative associative combining function
4663 +         * @return the result of accumulating the given transformation
4664 +         * of all entries
4665 +         */
4666 +        public double reduceToDouble(ObjectToDouble<Map.Entry<K,V>> transformer,
4667 +                                     double basis,
4668 +                                     DoubleByDoubleToDouble reducer) {
4669 +            return ForkJoinTasks.reduceEntriesToDouble
4670 +                (map, transformer, basis, reducer).invoke();
4671 +        }
4672 +
4673 +        /**
4674 +         * Returns the result of accumulating the given transformation
4675 +         * of all entries using the given reducer to combine values,
4676 +         * and the given basis as an identity value.
4677 +         *
4678 +         * @param transformer a function returning the transformation
4679 +         * for an element
4680 +         * @param basis the identity (initial default value) for the reduction
4681 +         * @param reducer a commutative associative combining function
4682 +         * @return  the result of accumulating the given transformation
4683 +         * of all entries
4684 +         */
4685 +        public long reduceToLong(ObjectToLong<Map.Entry<K,V>> transformer,
4686 +                                 long basis,
4687 +                                 LongByLongToLong reducer) {
4688 +            return ForkJoinTasks.reduceEntriesToLong
4689 +                (map, transformer, basis, reducer).invoke();
4690 +        }
4691 +
4692 +        /**
4693 +         * Returns the result of accumulating the given transformation
4694 +         * of all entries using the given reducer to combine values,
4695 +         * and the given basis as an identity value.
4696 +         *
4697 +         * @param transformer a function returning the transformation
4698 +         * for an element
4699 +         * @param basis the identity (initial default value) for the reduction
4700 +         * @param reducer a commutative associative combining function
4701 +         * @return the result of accumulating the given transformation
4702 +         * of all entries
4703 +         */
4704 +        public int reduceToInt(ObjectToInt<Map.Entry<K,V>> transformer,
4705 +                               int basis,
4706 +                               IntByIntToInt reducer) {
4707 +            return ForkJoinTasks.reduceEntriesToInt
4708 +                (map, transformer, basis, reducer).invoke();
4709 +        }
4710 +
4711 +    }
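
A hedged sketch of an entry-view search, returning the key of some entry whose value exceeds a threshold, or null if none. It assumes that entrySet() returns the EntrySetView defined here and that the Fun callback interface declared elsewhere in this class exposes a single apply method; names and data are illustrative.

import java.util.Map;
import jsr166e.ConcurrentHashMapV8;

public class EntrySetViewDemo {
    public static void main(String[] args) {
        ConcurrentHashMapV8<String,Integer> map = new ConcurrentHashMapV8<String,Integer>();
        map.put("small", 1);
        map.put("large", 42);

        // The first non-null result produced by any worker stops further processing.
        String hit = map.entrySet().search(
            new ConcurrentHashMapV8.Fun<Map.Entry<String,Integer>,String>() {
                public String apply(Map.Entry<String,Integer> e) {
                    return e.getValue().intValue() > 10 ? e.getKey() : null;
                }
            });
        System.out.println(hit); // "large", or null if no value exceeded 10
    }
}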
4712 +
4713 +    // ---------------------------------------------------------------------
4714 +
4715 +    /**
4716 +     * Predefined tasks for performing bulk parallel operations on
4717 +     * ConcurrentHashMapV8s. These tasks follow the forms and rules used
4718 +     * for bulk operations. Each method has the same name, but returns
4719 +     * a task rather than invoking it. These methods may be useful in
4720 +     * custom applications such as submitting a task without waiting
4721 +     * for completion, using a custom pool, or combining with other
4722 +     * tasks.
4723 +     */
4724 +    public static class ForkJoinTasks {
4725 +        private ForkJoinTasks() {}
4726 +
4727 +        /**
4728 +         * Returns a task that when invoked, performs the given
4729 +         * action for each (key, value)
4730 +         * action for each (key, value).
4731 +         * @param map the map
4732 +         * @param action the action
4733 +         * @return the task
4734 +         */
4735 +        public static <K,V> ForkJoinTask<Void> forEach
4736 +            (ConcurrentHashMapV8<K,V> map,
4737 +             BiAction<K,V> action) {
4738 +            if (action == null) throw new NullPointerException();
4739 +            return new ForEachMappingTask<K,V>(map, null, -1, action);
4740 +        }
4741 +
4742 +        /**
4743 +         * Returns a task that when invoked, performs the given
4744 +         * action for each non-null transformation of each (key, value)
4745 +         * action for each non-null transformation of each (key, value).
4746 +         * @param map the map
4747 +         * @param transformer a function returning the transformation
4748 +         * for an element, or null if there is no transformation (in
4749 +         * which case the action is not applied)
4750 +         * @param action the action
4751 +         * @return the task
4752 +         */
4753 +        public static <K,V,U> ForkJoinTask<Void> forEach
4754 +            (ConcurrentHashMapV8<K,V> map,
4755 +             BiFun<? super K, ? super V, ? extends U> transformer,
4756 +             Action<U> action) {
4757 +            if (transformer == null || action == null)
4758 +                throw new NullPointerException();
4759 +            return new ForEachTransformedMappingTask<K,V,U>
4760 +                (map, null, -1, transformer, action);
4761 +        }
4762 +
4763 +        /**
4764 +         * Returns a task that when invoked, returns a non-null result
4765 +         * from applying the given search function on each (key,
4766 +         * value), or null if none. Upon success, further element
4767 +         * processing is suppressed and the results of any other
4768 +         * parallel invocations of the search function are ignored.
4769 +         *
4770 +         * @param map the map
4771 +         * @param searchFunction a function returning a non-null
4772 +         * result on success, else null
4773 +         * @return the task
4774 +         */
4775 +        public static <K,V,U> ForkJoinTask<U> search
4776 +            (ConcurrentHashMapV8<K,V> map,
4777 +             BiFun<? super K, ? super V, ? extends U> searchFunction) {
4778 +            if (searchFunction == null) throw new NullPointerException();
4779 +            return new SearchMappingsTask<K,V,U>
4780 +                (map, null, -1, searchFunction,
4781 +                 new AtomicReference<U>());
4782 +        }
4783 +
4784 +        /**
4785 +         * Returns a task that when invoked, returns the result of
4786 +         * accumulating the given transformation of all (key, value) pairs
4787 +         * using the given reducer to combine values, or null if none.
4788 +         *
4789 +         * @param map the map
4790 +         * @param transformer a function returning the transformation
4791 +         * for an element, or null if there is no transformation (in
4792 +         * which case it is not combined).
4793 +         * @param reducer a commutative associative combining function
4794 +         * @return the task
4795 +         */
4796 +        public static <K,V,U> ForkJoinTask<U> reduce
4797 +            (ConcurrentHashMapV8<K,V> map,
4798 +             BiFun<? super K, ? super V, ? extends U> transformer,
4799 +             BiFun<? super U, ? super U, ? extends U> reducer) {
4800 +            if (transformer == null || reducer == null)
4801 +                throw new NullPointerException();
4802 +            return new MapReduceMappingsTask<K,V,U>
4803 +                (map, null, -1, null, transformer, reducer);
4804 +        }
4805 +
4806 +        /**
4807 +         * Returns a task that when invoked, returns the result of
4808 +         * accumulating the given transformation of all (key, value) pairs
4809 +         * using the given reducer to combine values, and the given
4810 +         * basis as an identity value.
4811 +         *
4812 +         * @param map the map
4813 +         * @param transformer a function returning the transformation
4814 +         * for an element
4815 +         * @param basis the identity (initial default value) for the reduction
4816 +         * @param reducer a commutative associative combining function
4817 +         * @return the task
4818 +         */
4819 +        public static <K,V> ForkJoinTask<Double> reduceToDouble
4820 +            (ConcurrentHashMapV8<K,V> map,
4821 +             ObjectByObjectToDouble<? super K, ? super V> transformer,
4822 +             double basis,
4823 +             DoubleByDoubleToDouble reducer) {
4824 +            if (transformer == null || reducer == null)
4825 +                throw new NullPointerException();
4826 +            return new MapReduceMappingsToDoubleTask<K,V>
4827 +                (map, null, -1, null, transformer, basis, reducer);
4828 +        }
4829 +
4830 +        /**
4831 +         * Returns a task that when invoked, returns the result of
4832 +         * accumulating the given transformation of all (key, value) pairs
4833 +         * using the given reducer to combine values, and the given
4834 +         * basis as an identity value.
4835 +         *
4836 +         * @param map the map
4837 +         * @param transformer a function returning the transformation
4838 +         * for an element
4839 +         * @param basis the identity (initial default value) for the reduction
4840 +         * @param reducer a commutative associative combining function
4841 +         * @return the task
4842 +         */
4843 +        public static <K,V> ForkJoinTask<Long> reduceToLong
4844 +            (ConcurrentHashMapV8<K,V> map,
4845 +             ObjectByObjectToLong<? super K, ? super V> transformer,
4846 +             long basis,
4847 +             LongByLongToLong reducer) {
4848 +            if (transformer == null || reducer == null)
4849 +                throw new NullPointerException();
4850 +            return new MapReduceMappingsToLongTask<K,V>
4851 +                (map, null, -1, null, transformer, basis, reducer);
4852 +        }
4853 +
4854 +        /**
4855 +         * Returns a task that when invoked, returns the result of
4856 +         * accumulating the given transformation of all (key, value) pairs
4857 +         * using the given reducer to combine values, and the given
4858 +         * basis as an identity value.
4859 +         *
4860 +         * @param transformer a function returning the transformation
4861 +         * for an element
4862 +         * @param basis the identity (initial default value) for the reduction
4863 +         * @param reducer a commutative associative combining function
4864 +         * @return the task
4865 +         */
4866 +        public static <K,V> ForkJoinTask<Integer> reduceToInt
4867 +            (ConcurrentHashMapV8<K,V> map,
4868 +             ObjectByObjectToInt<? super K, ? super V> transformer,
4869 +             int basis,
4870 +             IntByIntToInt reducer) {
4871 +            if (transformer == null || reducer == null)
4872 +                throw new NullPointerException();
4873 +            return new MapReduceMappingsToIntTask<K,V>
4874 +                (map, null, -1, null, transformer, basis, reducer);
4875 +        }
4876 +
4877 +        /**
4878 +         * Returns a task that when invoked, performs the given action
4879 +         * for each key.
4880 +         *
4881 +         * @param map the map
4882 +         * @param action the action
4883 +         * @return the task
4884 +         */
4885 +        public static <K,V> ForkJoinTask<Void> forEachKey
4886 +            (ConcurrentHashMapV8<K,V> map,
4887 +             Action<K> action) {
4888 +            if (action == null) throw new NullPointerException();
4889 +            return new ForEachKeyTask<K,V>(map, null, -1, action);
4890 +        }
4891 +
4892 +        /**
4893 +         * Returns a task that when invoked, performs the given action
4894 +         * for each non-null transformation of each key.
4895 +         *
4896 +         * @param map the map
4897 +         * @param transformer a function returning the transformation
4898 +         * for an element, or null if there is no transformation (in
4899 +         * which case the action is not applied)
4900 +         * @param action the action
4901 +         * @return the task
4902 +         */
4903 +        public static <K,V,U> ForkJoinTask<Void> forEachKey
4904 +            (ConcurrentHashMapV8<K,V> map,
4905 +             Fun<? super K, ? extends U> transformer,
4906 +             Action<U> action) {
4907 +            if (transformer == null || action == null)
4908 +                throw new NullPointerException();
4909 +            return new ForEachTransformedKeyTask<K,V,U>
4910 +                (map, null, -1, transformer, action);
4911 +        }
4912 +
4913 +        /**
4914 +         * Returns a task that when invoked, returns a non-null result
4915 +         * from applying the given search function on each key, or
4916 +         * null if none.  Upon success, further element processing is
4917 +         * suppressed and the results of any other parallel
4918 +         * invocations of the search function are ignored.
4919 +         *
4920 +         * @param map the map
4921 +         * @param searchFunction a function returning a non-null
4922 +         * result on success, else null
4923 +         * @return the task
4924 +         */
4925 +        public static <K,V,U> ForkJoinTask<U> searchKeys
4926 +            (ConcurrentHashMapV8<K,V> map,
4927 +             Fun<? super K, ? extends U> searchFunction) {
4928 +            if (searchFunction == null) throw new NullPointerException();
4929 +            return new SearchKeysTask<K,V,U>
4930 +                (map, null, -1, searchFunction,
4931 +                 new AtomicReference<U>());
4932 +        }
4933 +
4934 +        /**
4935 +         * Returns a task that when invoked, returns the result of
4936 +         * accumulating all keys using the given reducer to combine
4937 +         * values, or null if none.
4938 +         *
4939 +         * @param map the map
4940 +         * @param reducer a commutative associative combining function
4941 +         * @return the task
4942 +         */
4943 +        public static <K,V> ForkJoinTask<K> reduceKeys
4944 +            (ConcurrentHashMapV8<K,V> map,
4945 +             BiFun<? super K, ? super K, ? extends K> reducer) {
4946 +            if (reducer == null) throw new NullPointerException();
4947 +            return new ReduceKeysTask<K,V>
4948 +                (map, null, -1, null, reducer);
4949 +        }
4950 +
4951 +        /**
4952 +         * Returns a task that when invoked, returns the result of
4953 +         * accumulating the given transformation of all keys using the given
4954 +         * reducer to combine values, or null if none.
4955 +         *
4956 +         * @param map the map
4957 +         * @param transformer a function returning the transformation
4958 +         * for an element, or null if there is no transformation (in
4959 +         * which case it is not combined).
4960 +         * @param reducer a commutative associative combining function
4961 +         * @return the task
4962 +         */
4963 +        public static <K,V,U> ForkJoinTask<U> reduceKeys
4964 +            (ConcurrentHashMapV8<K,V> map,
4965 +             Fun<? super K, ? extends U> transformer,
4966 +             BiFun<? super U, ? super U, ? extends U> reducer) {
4967 +            if (transformer == null || reducer == null)
4968 +                throw new NullPointerException();
4969 +            return new MapReduceKeysTask<K,V,U>
4970 +                (map, null, -1, null, transformer, reducer);
4971 +        }
4972 +
4973 +        /**
4974 +         * Returns a task that when invoked, returns the result of
4975 +         * accumulating the given transformation of all keys using the given
4976 +         * reducer to combine values, and the given basis as an
4977 +         * identity value.
4978 +         *
4979 +         * @param map the map
4980 +         * @param transformer a function returning the transformation
4981 +         * for an element
4982 +         * @param basis the identity (initial default value) for the reduction
4983 +         * @param reducer a commutative associative combining function
4984 +         * @return the task
4985 +         */
4986 +        public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
4987 +            (ConcurrentHashMapV8<K,V> map,
4988 +             ObjectToDouble<? super K> transformer,
4989 +             double basis,
4990 +             DoubleByDoubleToDouble reducer) {
4991 +            if (transformer == null || reducer == null)
4992 +                throw new NullPointerException();
4993 +            return new MapReduceKeysToDoubleTask<K,V>
4994 +                (map, null, -1, null, transformer, basis, reducer);
4995 +        }
4996 +
4997 +        /**
4998 +         * Returns a task that when invoked, returns the result of
4999 +         * accumulating the given transformation of all keys using the given
5000 +         * reducer to combine values, and the given basis as an
5001 +         * identity value.
5002 +         *
5003 +         * @param map the map
5004 +         * @param transformer a function returning the transformation
5005 +         * for an element
5006 +         * @param basis the identity (initial default value) for the reduction
5007 +         * @param reducer a commutative associative combining function
5008 +         * @return the task
5009 +         */
5010 +        public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5011 +            (ConcurrentHashMapV8<K,V> map,
5012 +             ObjectToLong<? super K> transformer,
5013 +             long basis,
5014 +             LongByLongToLong reducer) {
5015 +            if (transformer == null || reducer == null)
5016 +                throw new NullPointerException();
5017 +            return new MapReduceKeysToLongTask<K,V>
5018 +                (map, null, -1, null, transformer, basis, reducer);
5019 +        }
5020 +
5021 +        /**
5022 +         * Returns a task that when invoked, returns the result of
5023 +         * accumulating the given transformation of all keys using the given
5024 +         * reducer to combine values, and the given basis as an
5025 +         * identity value.
5026 +         *
5027 +         * @param map the map
5028 +         * @param transformer a function returning the transformation
5029 +         * for an element
5030 +         * @param basis the identity (initial default value) for the reduction
5031 +         * @param reducer a commutative associative combining function
5032 +         * @return the task
5033 +         */
5034 +        public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5035 +            (ConcurrentHashMapV8<K,V> map,
5036 +             ObjectToInt<? super K> transformer,
5037 +             int basis,
5038 +             IntByIntToInt reducer) {
5039 +            if (transformer == null || reducer == null)
5040 +                throw new NullPointerException();
5041 +            return new MapReduceKeysToIntTask<K,V>
5042 +                (map, null, -1, null, transformer, basis, reducer);
5043 +        }
5044 +
5045 +        /**
5046 +         * Returns a task that when invoked, performs the given action
5047 +         * for each value.
5048 +         *
5049 +         * @param map the map
5050 +         * @param action the action
5051 +         */
5052 +        public static <K,V> ForkJoinTask<Void> forEachValue
5053 +            (ConcurrentHashMapV8<K,V> map,
5054 +             Action<V> action) {
5055 +            if (action == null) throw new NullPointerException();
5056 +            return new ForEachValueTask<K,V>(map, null, -1, action);
5057 +        }
5058 +
5059 +        /**
5060 +         * Returns a task that when invoked, performs the given action
5061 +         * for each non-null transformation of each value.
5062 +         *
5063 +         * @param map the map
5064 +         * @param transformer a function returning the transformation
5065 +         * for an element, or null if there is no transformation (in
5066 +         * which case the action is not applied)
5067 +         * @param action the action
5068 +         */
5069 +        public static <K,V,U> ForkJoinTask<Void> forEachValue
5070 +            (ConcurrentHashMapV8<K,V> map,
5071 +             Fun<? super V, ? extends U> transformer,
5072 +             Action<U> action) {
5073 +            if (transformer == null || action == null)
5074 +                throw new NullPointerException();
5075 +            return new ForEachTransformedValueTask<K,V,U>
5076 +                (map, null, -1, transformer, action);
5077 +        }
5078 +
5079 +        /**
5080 +         * Returns a task that when invoked, returns a non-null result
5081 +         * from applying the given search function on each value, or
5082 +         * null if none.  Upon success, further element processing is
5083 +         * suppressed and the results of any other parallel
5084 +         * invocations of the search function are ignored.
5085 +         *
5086 +         * @param map the map
5087 +         * @param searchFunction a function returning a non-null
5088 +         * result on success, else null
5089 +         * @return the task
5090 +         */
5091 +        public static <K,V,U> ForkJoinTask<U> searchValues
5092 +            (ConcurrentHashMapV8<K,V> map,
5093 +             Fun<? super V, ? extends U> searchFunction) {
5094 +            if (searchFunction == null) throw new NullPointerException();
5095 +            return new SearchValuesTask<K,V,U>
5096 +                (map, null, -1, searchFunction,
5097 +                 new AtomicReference<U>());
5098 +        }
5099 +
5100 +        /**
5101 +         * Returns a task that when invoked, returns the result of
5102 +         * accumulating all values using the given reducer to combine
5103 +         * values, or null if none.
5104 +         *
5105 +         * @param map the map
5106 +         * @param reducer a commutative associative combining function
5107 +         * @return the task
5108 +         */
5109 +        public static <K,V> ForkJoinTask<V> reduceValues
5110 +            (ConcurrentHashMapV8<K,V> map,
5111 +             BiFun<? super V, ? super V, ? extends V> reducer) {
5112 +            if (reducer == null) throw new NullPointerException();
5113 +            return new ReduceValuesTask<K,V>
5114 +                (map, null, -1, null, reducer);
5115 +        }
5116 +
5117 +        /**
5118 +         * Returns a task that when invoked, returns the result of
5119 +         * accumulating the given transformation of all values using the
5120 +         * given reducer to combine values, or null if none.
5121 +         *
5122 +         * @param map the map
5123 +         * @param transformer a function returning the transformation
5124 +         * for an element, or null if there is no transformation (in
5125 +         * which case it is not combined).
5126 +         * @param reducer a commutative associative combining function
5127 +         * @return the task
5128 +         */
5129 +        public static <K,V,U> ForkJoinTask<U> reduceValues
5130 +            (ConcurrentHashMapV8<K,V> map,
5131 +             Fun<? super V, ? extends U> transformer,
5132 +             BiFun<? super U, ? super U, ? extends U> reducer) {
5133 +            if (transformer == null || reducer == null)
5134 +                throw new NullPointerException();
5135 +            return new MapReduceValuesTask<K,V,U>
5136 +                (map, null, -1, null, transformer, reducer);
5137 +        }
5138 +
5139 +        /**
5140 +         * Returns a task that when invoked, returns the result of
5141 +         * accumulating the given transformation of all values using the
5142 +         * given reducer to combine values, and the given basis as an
5143 +         * identity value.
5144 +         *
5145 +         * @param map the map
5146 +         * @param transformer a function returning the transformation
5147 +         * for an element
5148 +         * @param basis the identity (initial default value) for the reduction
5149 +         * @param reducer a commutative associative combining function
5150 +         * @return the task
5151 +         */
5152 +        public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5153 +            (ConcurrentHashMapV8<K,V> map,
5154 +             ObjectToDouble<? super V> transformer,
5155 +             double basis,
5156 +             DoubleByDoubleToDouble reducer) {
5157 +            if (transformer == null || reducer == null)
5158 +                throw new NullPointerException();
5159 +            return new MapReduceValuesToDoubleTask<K,V>
5160 +                (map, null, -1, null, transformer, basis, reducer);
5161 +        }
5162 +
5163 +        /**
5164 +         * Returns a task that when invoked, returns the result of
5165 +         * accumulating the given transformation of all values using the
5166 +         * given reducer to combine values, and the given basis as an
5167 +         * identity value.
5168 +         *
5169 +         * @param map the map
5170 +         * @param transformer a function returning the transformation
5171 +         * for an element
5172 +         * @param basis the identity (initial default value) for the reduction
5173 +         * @param reducer a commutative associative combining function
5174 +         * @return the task
5175 +         */
5176 +        public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5177 +            (ConcurrentHashMapV8<K,V> map,
5178 +             ObjectToLong<? super V> transformer,
5179 +             long basis,
5180 +             LongByLongToLong reducer) {
5181 +            if (transformer == null || reducer == null)
5182 +                throw new NullPointerException();
5183 +            return new MapReduceValuesToLongTask<K,V>
5184 +                (map, null, -1, null, transformer, basis, reducer);
5185 +        }
5186 +
5187 +        /**
5188 +         * Returns a task that when invoked, returns the result of
5189 +         * accumulating the given transformation of all values using the
5190 +         * given reducer to combine values, and the given basis as an
5191 +         * identity value.
5192 +         *
5193 +         * @param map the map
5194 +         * @param transformer a function returning the transformation
5195 +         * for an element
5196 +         * @param basis the identity (initial default value) for the reduction
5197 +         * @param reducer a commutative associative combining function
5198 +         * @return the task
5199 +         */
5200 +        public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5201 +            (ConcurrentHashMapV8<K,V> map,
5202 +             ObjectToInt<? super V> transformer,
5203 +             int basis,
5204 +             IntByIntToInt reducer) {
5205 +            if (transformer == null || reducer == null)
5206 +                throw new NullPointerException();
5207 +            return new MapReduceValuesToIntTask<K,V>
5208 +                (map, null, -1, null, transformer, basis, reducer);
5209 +        }
5210 +
5211 +        /**
5212 +         * Returns a task that when invoked, performs the given action
5213 +         * for each entry.
5214 +         *
5215 +         * @param map the map
5216 +         * @param action the action
5217 +         */
5218 +        public static <K,V> ForkJoinTask<Void> forEachEntry
5219 +            (ConcurrentHashMapV8<K,V> map,
5220 +             Action<Map.Entry<K,V>> action) {
5221 +            if (action == null) throw new NullPointerException();
5222 +            return new ForEachEntryTask<K,V>(map, null, -1, action);
5223 +        }
5224 +
5225 +        /**
5226 +         * Returns a task that when invoked, performs the given action
5227 +         * for each non-null transformation of each entry.
5228 +         *
5229 +         * @param map the map
5230 +         * @param transformer a function returning the transformation
5231 +         * for an element, or null if there is no transformation (in
5232 +         * which case the action is not applied)
5233 +         * @param action the action
5234 +         */
5235 +        public static <K,V,U> ForkJoinTask<Void> forEachEntry
5236 +            (ConcurrentHashMapV8<K,V> map,
5237 +             Fun<Map.Entry<K,V>, ? extends U> transformer,
5238 +             Action<U> action) {
5239 +            if (transformer == null || action == null)
5240 +                throw new NullPointerException();
5241 +            return new ForEachTransformedEntryTask<K,V,U>
5242 +                (map, null, -1, transformer, action);
5243 +        }
5244 +
5245 +        /**
5246 +         * Returns a task that when invoked, returns a non-null result
5247 +         * from applying the given search function on each entry, or
5248 +         * null if none.  Upon success, further element processing is
5249 +         * suppressed and the results of any other parallel
5250 +         * invocations of the search function are ignored.
5251 +         *
5252 +         * @param map the map
5253 +         * @param searchFunction a function returning a non-null
5254 +         * result on success, else null
5255 +         * @return the task
5256 +         */
5257 +        public static <K,V,U> ForkJoinTask<U> searchEntries
5258 +            (ConcurrentHashMapV8<K,V> map,
5259 +             Fun<Map.Entry<K,V>, ? extends U> searchFunction) {
5260 +            if (searchFunction == null) throw new NullPointerException();
5261 +            return new SearchEntriesTask<K,V,U>
5262 +                (map, null, -1, searchFunction,
5263 +                 new AtomicReference<U>());
5264 +        }
5265 +
5266 +        /**
5267 +         * Returns a task that when invoked, returns the result of
5268 +         * accumulating all entries using the given reducer to combine
5269 +         * values, or null if none.
5270 +         *
5271 +         * @param map the map
5272 +         * @param reducer a commutative associative combining function
5273 +         * @return the task
5274 +         */
5275 +        public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5276 +            (ConcurrentHashMapV8<K,V> map,
5277 +             BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5278 +            if (reducer == null) throw new NullPointerException();
5279 +            return new ReduceEntriesTask<K,V>
5280 +                (map, null, -1, null, reducer);
5281 +        }
5282 +
5283 +        /**
5284 +         * Returns a task that when invoked, returns the result of
5285 +         * accumulating the given transformation of all entries using the
5286 +         * given reducer to combine values, or null if none.
5287 +         *
5288 +         * @param map the map
5289 +         * @param transformer a function returning the transformation
5290 +         * for an element, or null if there is no transformation (in
5291 +         * which case it is not combined).
5292 +         * @param reducer a commutative associative combining function
5293 +         * @return the task
5294 +         */
5295 +        public static <K,V,U> ForkJoinTask<U> reduceEntries
5296 +            (ConcurrentHashMapV8<K,V> map,
5297 +             Fun<Map.Entry<K,V>, ? extends U> transformer,
5298 +             BiFun<? super U, ? super U, ? extends U> reducer) {
5299 +            if (transformer == null || reducer == null)
5300 +                throw new NullPointerException();
5301 +            return new MapReduceEntriesTask<K,V,U>
5302 +                (map, null, -1, null, transformer, reducer);
5303 +        }
5304 +
5305 +        /**
5306 +         * Returns a task that when invoked, returns the result of
5307 +         * accumulating the given transformation of all entries using the
5308 +         * given reducer to combine values, and the given basis as an
5309 +         * identity value.
5310 +         *
5311 +         * @param map the map
5312 +         * @param transformer a function returning the transformation
5313 +         * for an element
5314 +         * @param basis the identity (initial default value) for the reduction
5315 +         * @param reducer a commutative associative combining function
5316 +         * @return the task
5317 +         */
5318 +        public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5319 +            (ConcurrentHashMapV8<K,V> map,
5320 +             ObjectToDouble<Map.Entry<K,V>> transformer,
5321 +             double basis,
5322 +             DoubleByDoubleToDouble reducer) {
5323 +            if (transformer == null || reducer == null)
5324 +                throw new NullPointerException();
5325 +            return new MapReduceEntriesToDoubleTask<K,V>
5326 +                (map, null, -1, null, transformer, basis, reducer);
5327 +        }
5328 +
5329 +        /**
5330 +         * Returns a task that when invoked, returns the result of
5331 +         * accumulating the given transformation of all entries using the
5332 +         * given reducer to combine values, and the given basis as an
5333 +         * identity value.
5334 +         *
5335 +         * @param map the map
5336 +         * @param transformer a function returning the transformation
5337 +         * for an element
5338 +         * @param basis the identity (initial default value) for the reduction
5339 +         * @param reducer a commutative associative combining function
5340 +         * @return the task
5341 +         */
5342 +        public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5343 +            (ConcurrentHashMapV8<K,V> map,
5344 +             ObjectToLong<Map.Entry<K,V>> transformer,
5345 +             long basis,
5346 +             LongByLongToLong reducer) {
5347 +            if (transformer == null || reducer == null)
5348 +                throw new NullPointerException();
5349 +            return new MapReduceEntriesToLongTask<K,V>
5350 +                (map, null, -1, null, transformer, basis, reducer);
5351 +        }
5352 +
5353 +        /**
5354 +         * Returns a task that, when invoked, returns the result of
5355 +         * accumulating the given transformation of all entries using the
5356 +         * given reducer to combine values, and the given basis as an
5357 +         * identity value.
5358 +         *
5359 +         * @param map the map
5360 +         * @param transformer a function returning the transformation
5361 +         * for an element
5362 +         * @param basis the identity (initial default value) for the reduction
5363 +         * @param reducer a commutative associative combining function
5364 +         * @return the task
5365 +         */
5366 +        public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5367 +            (ConcurrentHashMapV8<K,V> map,
5368 +             ObjectToInt<Map.Entry<K,V>> transformer,
5369 +             int basis,
5370 +             IntByIntToInt reducer) {
5371 +            if (transformer == null || reducer == null)
5372 +                throw new NullPointerException();
5373 +            return new MapReduceEntriesToIntTask<K,V>
5374 +                (map, null, -1, null, transformer, basis, reducer);
5375 +        }
5376 +    }
5377 +
5378 +    // -------------------------------------------------------
5379 +
5380 +    /*
5381 +     * Task classes. Coded in a regular but ugly format/style to
5382 +     * simplify checks that each variant differs in the right way from
5383 +     * others.
5384 +     */
5385 +
5386 +    @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
5387 +        extends Traverser<K,V,Void> {
5388 +        final Action<K> action;
5389 +        ForEachKeyTask
5390 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5391 +             Action<K> action) {
5392 +            super(m, p, b);
5393 +            this.action = action;
5394 +        }
5395 +        @SuppressWarnings("unchecked") public final void compute() {
5396 +            final Action<K> action;
5397 +            if ((action = this.action) == null)
5398 +                throw new NullPointerException();
5399 +            for (int b; (b = preSplit()) > 0;)
5400 +                new ForEachKeyTask<K,V>(map, this, b, action).fork();
5401 +            while (advance() != null)
5402 +                action.apply((K)nextKey);
5403 +            propagateCompletion();
5404 +        }
5405 +    }
5406 +
5407 +    @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
5408 +        extends Traverser<K,V,Void> {
5409 +        final Action<V> action;
5410 +        ForEachValueTask
5411 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5412 +             Action<V> action) {
5413 +            super(m, p, b);
5414 +            this.action = action;
5415 +        }
5416 +        @SuppressWarnings("unchecked") public final void compute() {
5417 +            final Action<V> action;
5418 +            if ((action = this.action) == null)
5419 +                throw new NullPointerException();
5420 +            for (int b; (b = preSplit()) > 0;)
5421 +                new ForEachValueTask<K,V>(map, this, b, action).fork();
5422 +            Object v;
5423 +            while ((v = advance()) != null)
5424 +                action.apply((V)v);
5425 +            propagateCompletion();
5426 +        }
5427 +    }
5428 +
5429 +    @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
5430 +        extends Traverser<K,V,Void> {
5431 +        final Action<Entry<K,V>> action;
5432 +        ForEachEntryTask
5433 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5434 +             Action<Entry<K,V>> action) {
5435 +            super(m, p, b);
5436 +            this.action = action;
5437 +        }
5438 +        @SuppressWarnings("unchecked") public final void compute() {
5439 +            final Action<Entry<K,V>> action;
5440 +            if ((action = this.action) == null)
5441 +                throw new NullPointerException();
5442 +            for (int b; (b = preSplit()) > 0;)
5443 +                new ForEachEntryTask<K,V>(map, this, b, action).fork();
5444 +            Object v;
5445 +            while ((v = advance()) != null)
5446 +                action.apply(entryFor((K)nextKey, (V)v));
5447 +            propagateCompletion();
5448 +        }
5449 +    }
5450 +
5451 +    @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
5452 +        extends Traverser<K,V,Void> {
5453 +        final BiAction<K,V> action;
5454 +        ForEachMappingTask
5455 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5456 +             BiAction<K,V> action) {
5457 +            super(m, p, b);
5458 +            this.action = action;
5459 +        }
5460 +        @SuppressWarnings("unchecked") public final void compute() {
5461 +            final BiAction<K,V> action;
5462 +            if ((action = this.action) == null)
5463 +                throw new NullPointerException();
5464 +            for (int b; (b = preSplit()) > 0;)
5465 +                new ForEachMappingTask<K,V>(map, this, b, action).fork();
5466 +            Object v;
5467 +            while ((v = advance()) != null)
5468 +                action.apply((K)nextKey, (V)v);
5469 +            propagateCompletion();
5470 +        }
5471 +    }
5472 +
5473 +    @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
5474 +        extends Traverser<K,V,Void> {
5475 +        final Fun<? super K, ? extends U> transformer;
5476 +        final Action<U> action;
5477 +        ForEachTransformedKeyTask
5478 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5479 +             Fun<? super K, ? extends U> transformer, Action<U> action) {
5480 +            super(m, p, b);
5481 +            this.transformer = transformer; this.action = action;
5482 +        }
5483 +        @SuppressWarnings("unchecked") public final void compute() {
5484 +            final Fun<? super K, ? extends U> transformer;
5485 +            final Action<U> action;
5486 +            if ((transformer = this.transformer) == null ||
5487 +                (action = this.action) == null)
5488 +                throw new NullPointerException();
5489 +            for (int b; (b = preSplit()) > 0;)
5490 +                new ForEachTransformedKeyTask<K,V,U>
5491 +                     (map, this, b, transformer, action).fork();
5492 +            U u;
5493 +            while (advance() != null) {
5494 +                if ((u = transformer.apply((K)nextKey)) != null)
5495 +                    action.apply(u);
5496 +            }
5497 +            propagateCompletion();
5498 +        }
5499 +    }
5500 +
5501 +    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
5502 +        extends Traverser<K,V,Void> {
5503 +        final Fun<? super V, ? extends U> transformer;
5504 +        final Action<U> action;
5505 +        ForEachTransformedValueTask
5506 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5507 +             Fun<? super V, ? extends U> transformer, Action<U> action) {
5508 +            super(m, p, b);
5509 +            this.transformer = transformer; this.action = action;
5510 +        }
5511 +        @SuppressWarnings("unchecked") public final void compute() {
5512 +            final Fun<? super V, ? extends U> transformer;
5513 +            final Action<U> action;
5514 +            if ((transformer = this.transformer) == null ||
5515 +                (action = this.action) == null)
5516 +                throw new NullPointerException();
5517 +            for (int b; (b = preSplit()) > 0;)
5518 +                new ForEachTransformedValueTask<K,V,U>
5519 +                    (map, this, b, transformer, action).fork();
5520 +            Object v; U u;
5521 +            while ((v = advance()) != null) {
5522 +                if ((u = transformer.apply((V)v)) != null)
5523 +                    action.apply(u);
5524 +            }
5525 +            propagateCompletion();
5526 +        }
5527 +    }
5528 +
5529 +    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
5530 +        extends Traverser<K,V,Void> {
5531 +        final Fun<Map.Entry<K,V>, ? extends U> transformer;
5532 +        final Action<U> action;
5533 +        ForEachTransformedEntryTask
5534 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5535 +             Fun<Map.Entry<K,V>, ? extends U> transformer, Action<U> action) {
5536 +            super(m, p, b);
5537 +            this.transformer = transformer; this.action = action;
5538 +        }
5539 +        @SuppressWarnings("unchecked") public final void compute() {
5540 +            final Fun<Map.Entry<K,V>, ? extends U> transformer;
5541 +            final Action<U> action;
5542 +            if ((transformer = this.transformer) == null ||
5543 +                (action = this.action) == null)
5544 +                throw new NullPointerException();
5545 +            for (int b; (b = preSplit()) > 0;)
5546 +                new ForEachTransformedEntryTask<K,V,U>
5547 +                    (map, this, b, transformer, action).fork();
5548 +            Object v; U u;
5549 +            while ((v = advance()) != null) {
5550 +                if ((u = transformer.apply(entryFor((K)nextKey, (V)v))) != null)
5551 +                    action.apply(u);
5552 +            }
5553 +            propagateCompletion();
5554 +        }
5555 +    }
5556 +
5557 +    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
5558 +        extends Traverser<K,V,Void> {
5559 +        final BiFun<? super K, ? super V, ? extends U> transformer;
5560 +        final Action<U> action;
5561 +        ForEachTransformedMappingTask
5562 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5563 +             BiFun<? super K, ? super V, ? extends U> transformer,
5564 +             Action<U> action) {
5565 +            super(m, p, b);
5566 +            this.transformer = transformer; this.action = action;
5567 +        }
5568 +        @SuppressWarnings("unchecked") public final void compute() {
5569 +            final BiFun<? super K, ? super V, ? extends U> transformer;
5570 +            final Action<U> action;
5571 +            if ((transformer = this.transformer) == null ||
5572 +                (action = this.action) == null)
5573 +                throw new NullPointerException();
5574 +            for (int b; (b = preSplit()) > 0;)
5575 +                new ForEachTransformedMappingTask<K,V,U>
5576 +                    (map, this, b, transformer, action).fork();
5577 +            Object v; U u;
5578 +            while ((v = advance()) != null) {
5579 +                if ((u = transformer.apply((K)nextKey, (V)v)) != null)
5580 +                    action.apply(u);
5581 +            }
5582 +            propagateCompletion();
5583 +        }
5584 +    }
5585 +
5586 +    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
5587 +        extends Traverser<K,V,U> {
5588 +        final Fun<? super K, ? extends U> searchFunction;
5589 +        final AtomicReference<U> result;
5590 +        SearchKeysTask
5591 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5592 +             Fun<? super K, ? extends U> searchFunction,
5593 +             AtomicReference<U> result) {
5594 +            super(m, p, b);
5595 +            this.searchFunction = searchFunction; this.result = result;
5596 +        }
5597 +        public final U getRawResult() { return result.get(); }
5598 +        @SuppressWarnings("unchecked") public final void compute() {
5599 +            final Fun<? super K, ? extends U> searchFunction;
5600 +            final AtomicReference<U> result;
5601 +            if ((searchFunction = this.searchFunction) == null ||
5602 +                (result = this.result) == null)
5603 +                throw new NullPointerException();
5604 +            for (int b;;) {
5605 +                if (result.get() != null)
5606 +                    return;
5607 +                if ((b = preSplit()) <= 0)
5608 +                    break;
5609 +                new SearchKeysTask<K,V,U>
5610 +                    (map, this, b, searchFunction, result).fork();
5611 +            }
5612 +            while (result.get() == null) {
5613 +                U u;
5614 +                if (advance() == null) {
5615 +                    propagateCompletion();
5616 +                    break;
5617 +                }
5618 +                if ((u = searchFunction.apply((K)nextKey)) != null) {
5619 +                    if (result.compareAndSet(null, u))
5620 +                        quietlyCompleteRoot();
5621 +                    break;
5622 +                }
5623 +            }
5624 +        }
5625 +    }
5626 +
5627 +    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
5628 +        extends Traverser<K,V,U> {
5629 +        final Fun<? super V, ? extends U> searchFunction;
5630 +        final AtomicReference<U> result;
5631 +        SearchValuesTask
5632 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5633 +             Fun<? super V, ? extends U> searchFunction,
5634 +             AtomicReference<U> result) {
5635 +            super(m, p, b);
5636 +            this.searchFunction = searchFunction; this.result = result;
5637 +        }
5638 +        public final U getRawResult() { return result.get(); }
5639 +        @SuppressWarnings("unchecked") public final void compute() {
5640 +            final Fun<? super V, ? extends U> searchFunction;
5641 +            final AtomicReference<U> result;
5642 +            if ((searchFunction = this.searchFunction) == null ||
5643 +                (result = this.result) == null)
5644 +                throw new NullPointerException();
5645 +            for (int b;;) {
5646 +                if (result.get() != null)
5647 +                    return;
5648 +                if ((b = preSplit()) <= 0)
5649 +                    break;
5650 +                new SearchValuesTask<K,V,U>
5651 +                    (map, this, b, searchFunction, result).fork();
5652 +            }
5653 +            while (result.get() == null) {
5654 +                Object v; U u;
5655 +                if ((v = advance()) == null) {
5656 +                    propagateCompletion();
5657 +                    break;
5658 +                }
5659 +                if ((u = searchFunction.apply((V)v)) != null) {
5660 +                    if (result.compareAndSet(null, u))
5661 +                        quietlyCompleteRoot();
5662 +                    break;
5663 +                }
5664 +            }
5665 +        }
5666 +    }
5667 +
5668 +    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
5669 +        extends Traverser<K,V,U> {
5670 +        final Fun<Entry<K,V>, ? extends U> searchFunction;
5671 +        final AtomicReference<U> result;
5672 +        SearchEntriesTask
5673 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5674 +             Fun<Entry<K,V>, ? extends U> searchFunction,
5675 +             AtomicReference<U> result) {
5676 +            super(m, p, b);
5677 +            this.searchFunction = searchFunction; this.result = result;
5678 +        }
5679 +        public final U getRawResult() { return result.get(); }
5680 +        @SuppressWarnings("unchecked") public final void compute() {
5681 +            final Fun<Entry<K,V>, ? extends U> searchFunction;
5682 +            final AtomicReference<U> result;
5683 +            if ((searchFunction = this.searchFunction) == null ||
5684 +                (result = this.result) == null)
5685 +                throw new NullPointerException();
5686 +            for (int b;;) {
5687 +                if (result.get() != null)
5688 +                    return;
5689 +                if ((b = preSplit()) <= 0)
5690 +                    break;
5691 +                new SearchEntriesTask<K,V,U>
5692 +                    (map, this, b, searchFunction, result).fork();
5693 +            }
5694 +            while (result.get() == null) {
5695 +                Object v; U u;
5696 +                if ((v = advance()) == null) {
5697 +                    propagateCompletion();
5698 +                    break;
5699 +                }
5700 +                if ((u = searchFunction.apply(entryFor((K)nextKey, (V)v))) != null) {
5701 +                    if (result.compareAndSet(null, u))
5702 +                        quietlyCompleteRoot();
5703 +                    return;
5704 +                }
5705 +            }
5706 +        }
5707 +    }
5708 +
5709 +    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
5710 +        extends Traverser<K,V,U> {
5711 +        final BiFun<? super K, ? super V, ? extends U> searchFunction;
5712 +        final AtomicReference<U> result;
5713 +        SearchMappingsTask
5714 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5715 +             BiFun<? super K, ? super V, ? extends U> searchFunction,
5716 +             AtomicReference<U> result) {
5717 +            super(m, p, b);
5718 +            this.searchFunction = searchFunction; this.result = result;
5719 +        }
5720 +        public final U getRawResult() { return result.get(); }
5721 +        @SuppressWarnings("unchecked") public final void compute() {
5722 +            final BiFun<? super K, ? super V, ? extends U> searchFunction;
5723 +            final AtomicReference<U> result;
5724 +            if ((searchFunction = this.searchFunction) == null ||
5725 +                (result = this.result) == null)
5726 +                throw new NullPointerException();
5727 +            for (int b;;) {
5728 +                if (result.get() != null)
5729 +                    return;
5730 +                if ((b = preSplit()) <= 0)
5731 +                    break;
5732 +                new SearchMappingsTask<K,V,U>
5733 +                    (map, this, b, searchFunction, result).fork();
5734 +            }
5735 +            while (result.get() == null) {
5736 +                Object v; U u;
5737 +                if ((v = advance()) == null) {
5738 +                    propagateCompletion();
5739 +                    break;
5740 +                }
5741 +                if ((u = searchFunction.apply((K)nextKey, (V)v)) != null) {
5742 +                    if (result.compareAndSet(null, u))
5743 +                        quietlyCompleteRoot();
5744 +                    break;
5745 +                }
5746 +            }
5747 +        }
5748 +    }
5749 +
5750 +    @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
5751 +        extends Traverser<K,V,K> {
5752 +        final BiFun<? super K, ? super K, ? extends K> reducer;
5753 +        K result;
5754 +        ReduceKeysTask<K,V> rights, nextRight;
5755 +        ReduceKeysTask
5756 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5757 +             ReduceKeysTask<K,V> nextRight,
5758 +             BiFun<? super K, ? super K, ? extends K> reducer) {
5759 +            super(m, p, b); this.nextRight = nextRight;
5760 +            this.reducer = reducer;
5761 +        }
5762 +        public final K getRawResult() { return result; }
5763 +        @SuppressWarnings("unchecked") public final void compute() {
5764 +            final BiFun<? super K, ? super K, ? extends K> reducer =
5765 +                this.reducer;
5766 +            if (reducer == null)
5767 +                throw new NullPointerException();
5768 +            for (int b; (b = preSplit()) > 0;)
5769 +                (rights = new ReduceKeysTask<K,V>
5770 +                 (map, this, b, rights, reducer)).fork();
5771 +            K r = null;
5772 +            while (advance() != null) {
5773 +                K u = (K)nextKey;
5774 +                r = (r == null) ? u : reducer.apply(r, u);
5775 +            }
5776 +            result = r;
5777 +            CountedCompleter<?> c;
5778 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
5779 +                ReduceKeysTask<K,V>
5780 +                    t = (ReduceKeysTask<K,V>)c,
5781 +                    s = t.rights;
5782 +                while (s != null) {
5783 +                    K tr, sr;
5784 +                    if ((sr = s.result) != null)
5785 +                        t.result = (((tr = t.result) == null) ? sr :
5786 +                                    reducer.apply(tr, sr));
5787 +                    s = t.rights = s.nextRight;
5788 +                }
5789 +            }
5790 +        }
5791 +    }
5792 +
5793 +    @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
5794 +        extends Traverser<K,V,V> {
5795 +        final BiFun<? super V, ? super V, ? extends V> reducer;
5796 +        V result;
5797 +        ReduceValuesTask<K,V> rights, nextRight;
5798 +        ReduceValuesTask
5799 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5800 +             ReduceValuesTask<K,V> nextRight,
5801 +             BiFun<? super V, ? super V, ? extends V> reducer) {
5802 +            super(m, p, b); this.nextRight = nextRight;
5803 +            this.reducer = reducer;
5804 +        }
5805 +        public final V getRawResult() { return result; }
5806 +        @SuppressWarnings("unchecked") public final void compute() {
5807 +            final BiFun<? super V, ? super V, ? extends V> reducer =
5808 +                this.reducer;
5809 +            if (reducer == null)
5810 +                throw new NullPointerException();
5811 +            for (int b; (b = preSplit()) > 0;)
5812 +                (rights = new ReduceValuesTask<K,V>
5813 +                 (map, this, b, rights, reducer)).fork();
5814 +            V r = null;
5815 +            Object v;
5816 +            while ((v = advance()) != null) {
5817 +                V u = (V)v;
5818 +                r = (r == null) ? u : reducer.apply(r, u);
5819 +            }
5820 +            result = r;
5821 +            CountedCompleter<?> c;
5822 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
5823 +                ReduceValuesTask<K,V>
5824 +                    t = (ReduceValuesTask<K,V>)c,
5825 +                    s = t.rights;
5826 +                while (s != null) {
5827 +                    V tr, sr;
5828 +                    if ((sr = s.result) != null)
5829 +                        t.result = (((tr = t.result) == null) ? sr :
5830 +                                    reducer.apply(tr, sr));
5831 +                    s = t.rights = s.nextRight;
5832 +                }
5833 +            }
5834 +        }
5835 +    }
5836 +
5837 +    @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
5838 +        extends Traverser<K,V,Map.Entry<K,V>> {
5839 +        final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5840 +        Map.Entry<K,V> result;
5841 +        ReduceEntriesTask<K,V> rights, nextRight;
5842 +        ReduceEntriesTask
5843 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5844 +             ReduceEntriesTask<K,V> nextRight,
5845 +             BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5846 +            super(m, p, b); this.nextRight = nextRight;
5847 +            this.reducer = reducer;
5848 +        }
5849 +        public final Map.Entry<K,V> getRawResult() { return result; }
5850 +        @SuppressWarnings("unchecked") public final void compute() {
5851 +            final BiFun<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer =
5852 +                this.reducer;
5853 +            if (reducer == null)
5854 +                throw new NullPointerException();
5855 +            for (int b; (b = preSplit()) > 0;)
5856 +                (rights = new ReduceEntriesTask<K,V>
5857 +                 (map, this, b, rights, reducer)).fork();
5858 +            Map.Entry<K,V> r = null;
5859 +            Object v;
5860 +            while ((v = advance()) != null) {
5861 +                Map.Entry<K,V> u = entryFor((K)nextKey, (V)v);
5862 +                r = (r == null) ? u : reducer.apply(r, u);
5863 +            }
5864 +            result = r;
5865 +            CountedCompleter<?> c;
5866 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
5867 +                ReduceEntriesTask<K,V>
5868 +                    t = (ReduceEntriesTask<K,V>)c,
5869 +                    s = t.rights;
5870 +                while (s != null) {
5871 +                    Map.Entry<K,V> tr, sr;
5872 +                    if ((sr = s.result) != null)
5873 +                        t.result = (((tr = t.result) == null) ? sr :
5874 +                                    reducer.apply(tr, sr));
5875 +                    s = t.rights = s.nextRight;
5876 +                }
5877 +            }
5878 +        }
5879 +    }
5880 +
5881 +    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
5882 +        extends Traverser<K,V,U> {
5883 +        final Fun<? super K, ? extends U> transformer;
5884 +        final BiFun<? super U, ? super U, ? extends U> reducer;
5885 +        U result;
5886 +        MapReduceKeysTask<K,V,U> rights, nextRight;
5887 +        MapReduceKeysTask
5888 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5889 +             MapReduceKeysTask<K,V,U> nextRight,
5890 +             Fun<? super K, ? extends U> transformer,
5891 +             BiFun<? super U, ? super U, ? extends U> reducer) {
5892 +            super(m, p, b); this.nextRight = nextRight;
5893 +            this.transformer = transformer;
5894 +            this.reducer = reducer;
5895 +        }
5896 +        public final U getRawResult() { return result; }
5897 +        @SuppressWarnings("unchecked") public final void compute() {
5898 +            final Fun<? super K, ? extends U> transformer =
5899 +                this.transformer;
5900 +            final BiFun<? super U, ? super U, ? extends U> reducer =
5901 +                this.reducer;
5902 +            if (transformer == null || reducer == null)
5903 +                throw new NullPointerException();
5904 +            for (int b; (b = preSplit()) > 0;)
5905 +                (rights = new MapReduceKeysTask<K,V,U>
5906 +                 (map, this, b, rights, transformer, reducer)).fork();
5907 +            U r = null, u;
5908 +            while (advance() != null) {
5909 +                if ((u = transformer.apply((K)nextKey)) != null)
5910 +                    r = (r == null) ? u : reducer.apply(r, u);
5911 +            }
5912 +            result = r;
5913 +            CountedCompleter<?> c;
5914 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
5915 +                MapReduceKeysTask<K,V,U>
5916 +                    t = (MapReduceKeysTask<K,V,U>)c,
5917 +                    s = t.rights;
5918 +                while (s != null) {
5919 +                    U tr, sr;
5920 +                    if ((sr = s.result) != null)
5921 +                        t.result = (((tr = t.result) == null) ? sr :
5922 +                                    reducer.apply(tr, sr));
5923 +                    s = t.rights = s.nextRight;
5924 +                }
5925 +            }
5926 +        }
5927 +    }
5928 +
5929 +    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
5930 +        extends Traverser<K,V,U> {
5931 +        final Fun<? super V, ? extends U> transformer;
5932 +        final BiFun<? super U, ? super U, ? extends U> reducer;
5933 +        U result;
5934 +        MapReduceValuesTask<K,V,U> rights, nextRight;
5935 +        MapReduceValuesTask
5936 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5937 +             MapReduceValuesTask<K,V,U> nextRight,
5938 +             Fun<? super V, ? extends U> transformer,
5939 +             BiFun<? super U, ? super U, ? extends U> reducer) {
5940 +            super(m, p, b); this.nextRight = nextRight;
5941 +            this.transformer = transformer;
5942 +            this.reducer = reducer;
5943 +        }
5944 +        public final U getRawResult() { return result; }
5945 +        @SuppressWarnings("unchecked") public final void compute() {
5946 +            final Fun<? super V, ? extends U> transformer =
5947 +                this.transformer;
5948 +            final BiFun<? super U, ? super U, ? extends U> reducer =
5949 +                this.reducer;
5950 +            if (transformer == null || reducer == null)
5951 +                throw new NullPointerException();
5952 +            for (int b; (b = preSplit()) > 0;)
5953 +                (rights = new MapReduceValuesTask<K,V,U>
5954 +                 (map, this, b, rights, transformer, reducer)).fork();
5955 +            U r = null, u;
5956 +            Object v;
5957 +            while ((v = advance()) != null) {
5958 +                if ((u = transformer.apply((V)v)) != null)
5959 +                    r = (r == null) ? u : reducer.apply(r, u);
5960 +            }
5961 +            result = r;
5962 +            CountedCompleter<?> c;
5963 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
5964 +                MapReduceValuesTask<K,V,U>
5965 +                    t = (MapReduceValuesTask<K,V,U>)c,
5966 +                    s = t.rights;
5967 +                while (s != null) {
5968 +                    U tr, sr;
5969 +                    if ((sr = s.result) != null)
5970 +                        t.result = (((tr = t.result) == null) ? sr :
5971 +                                    reducer.apply(tr, sr));
5972 +                    s = t.rights = s.nextRight;
5973 +                }
5974 +            }
5975 +        }
5976 +    }
5977 +
5978 +    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
5979 +        extends Traverser<K,V,U> {
5980 +        final Fun<Map.Entry<K,V>, ? extends U> transformer;
5981 +        final BiFun<? super U, ? super U, ? extends U> reducer;
5982 +        U result;
5983 +        MapReduceEntriesTask<K,V,U> rights, nextRight;
5984 +        MapReduceEntriesTask
5985 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
5986 +             MapReduceEntriesTask<K,V,U> nextRight,
5987 +             Fun<Map.Entry<K,V>, ? extends U> transformer,
5988 +             BiFun<? super U, ? super U, ? extends U> reducer) {
5989 +            super(m, p, b); this.nextRight = nextRight;
5990 +            this.transformer = transformer;
5991 +            this.reducer = reducer;
5992 +        }
5993 +        public final U getRawResult() { return result; }
5994 +        @SuppressWarnings("unchecked") public final void compute() {
5995 +            final Fun<Map.Entry<K,V>, ? extends U> transformer =
5996 +                this.transformer;
5997 +            final BiFun<? super U, ? super U, ? extends U> reducer =
5998 +                this.reducer;
5999 +            if (transformer == null || reducer == null)
6000 +                throw new NullPointerException();
6001 +            for (int b; (b = preSplit()) > 0;)
6002 +                (rights = new MapReduceEntriesTask<K,V,U>
6003 +                 (map, this, b, rights, transformer, reducer)).fork();
6004 +            U r = null, u;
6005 +            Object v;
6006 +            while ((v = advance()) != null) {
6007 +                if ((u = transformer.apply(entryFor((K)nextKey, (V)v))) != null)
6008 +                    r = (r == null) ? u : reducer.apply(r, u);
6009 +            }
6010 +            result = r;
6011 +            CountedCompleter<?> c;
6012 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6013 +                MapReduceEntriesTask<K,V,U>
6014 +                    t = (MapReduceEntriesTask<K,V,U>)c,
6015 +                    s = t.rights;
6016 +                while (s != null) {
6017 +                    U tr, sr;
6018 +                    if ((sr = s.result) != null)
6019 +                        t.result = (((tr = t.result) == null) ? sr :
6020 +                                    reducer.apply(tr, sr));
6021 +                    s = t.rights = s.nextRight;
6022 +                }
6023 +            }
6024 +        }
6025 +    }
6026 +
6027 +    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
6028 +        extends Traverser<K,V,U> {
6029 +        final BiFun<? super K, ? super V, ? extends U> transformer;
6030 +        final BiFun<? super U, ? super U, ? extends U> reducer;
6031 +        U result;
6032 +        MapReduceMappingsTask<K,V,U> rights, nextRight;
6033 +        MapReduceMappingsTask
6034 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6035 +             MapReduceMappingsTask<K,V,U> nextRight,
6036 +             BiFun<? super K, ? super V, ? extends U> transformer,
6037 +             BiFun<? super U, ? super U, ? extends U> reducer) {
6038 +            super(m, p, b); this.nextRight = nextRight;
6039 +            this.transformer = transformer;
6040 +            this.reducer = reducer;
6041 +        }
6042 +        public final U getRawResult() { return result; }
6043 +        @SuppressWarnings("unchecked") public final void compute() {
6044 +            final BiFun<? super K, ? super V, ? extends U> transformer =
6045 +                this.transformer;
6046 +            final BiFun<? super U, ? super U, ? extends U> reducer =
6047 +                this.reducer;
6048 +            if (transformer == null || reducer == null)
6049 +                throw new NullPointerException();
6050 +            for (int b; (b = preSplit()) > 0;)
6051 +                (rights = new MapReduceMappingsTask<K,V,U>
6052 +                 (map, this, b, rights, transformer, reducer)).fork();
6053 +            U r = null, u;
6054 +            Object v;
6055 +            while ((v = advance()) != null) {
6056 +                if ((u = transformer.apply((K)nextKey, (V)v)) != null)
6057 +                    r = (r == null) ? u : reducer.apply(r, u);
6058 +            }
6059 +            result = r;
6060 +            CountedCompleter<?> c;
6061 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6062 +                MapReduceMappingsTask<K,V,U>
6063 +                    t = (MapReduceMappingsTask<K,V,U>)c,
6064 +                    s = t.rights;
6065 +                while (s != null) {
6066 +                    U tr, sr;
6067 +                    if ((sr = s.result) != null)
6068 +                        t.result = (((tr = t.result) == null) ? sr :
6069 +                                    reducer.apply(tr, sr));
6070 +                    s = t.rights = s.nextRight;
6071 +                }
6072 +            }
6073 +        }
6074 +    }
6075 +
6076 +    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
6077 +        extends Traverser<K,V,Double> {
6078 +        final ObjectToDouble<? super K> transformer;
6079 +        final DoubleByDoubleToDouble reducer;
6080 +        final double basis;
6081 +        double result;
6082 +        MapReduceKeysToDoubleTask<K,V> rights, nextRight;
6083 +        MapReduceKeysToDoubleTask
6084 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6085 +             MapReduceKeysToDoubleTask<K,V> nextRight,
6086 +             ObjectToDouble<? super K> transformer,
6087 +             double basis,
6088 +             DoubleByDoubleToDouble reducer) {
6089 +            super(m, p, b); this.nextRight = nextRight;
6090 +            this.transformer = transformer;
6091 +            this.basis = basis; this.reducer = reducer;
6092 +        }
6093 +        public final Double getRawResult() { return result; }
6094 +        @SuppressWarnings("unchecked") public final void compute() {
6095 +            final ObjectToDouble<? super K> transformer =
6096 +                this.transformer;
6097 +            final DoubleByDoubleToDouble reducer = this.reducer;
6098 +            if (transformer == null || reducer == null)
6099 +                throw new NullPointerException();
6100 +            double r = this.basis;
6101 +            for (int b; (b = preSplit()) > 0;)
6102 +                (rights = new MapReduceKeysToDoubleTask<K,V>
6103 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6104 +            while (advance() != null)
6105 +                r = reducer.apply(r, transformer.apply((K)nextKey));
6106 +            result = r;
6107 +            CountedCompleter<?> c;
6108 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6109 +                MapReduceKeysToDoubleTask<K,V>
6110 +                    t = (MapReduceKeysToDoubleTask<K,V>)c,
6111 +                    s = t.rights;
6112 +                while (s != null) {
6113 +                    t.result = reducer.apply(t.result, s.result);
6114 +                    s = t.rights = s.nextRight;
6115 +                }
6116 +            }
6117 +        }
6118 +    }
6119 +
6120 +    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
6121 +        extends Traverser<K,V,Double> {
6122 +        final ObjectToDouble<? super V> transformer;
6123 +        final DoubleByDoubleToDouble reducer;
6124 +        final double basis;
6125 +        double result;
6126 +        MapReduceValuesToDoubleTask<K,V> rights, nextRight;
6127 +        MapReduceValuesToDoubleTask
6128 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6129 +             MapReduceValuesToDoubleTask<K,V> nextRight,
6130 +             ObjectToDouble<? super V> transformer,
6131 +             double basis,
6132 +             DoubleByDoubleToDouble reducer) {
6133 +            super(m, p, b); this.nextRight = nextRight;
6134 +            this.transformer = transformer;
6135 +            this.basis = basis; this.reducer = reducer;
6136 +        }
6137 +        public final Double getRawResult() { return result; }
6138 +        @SuppressWarnings("unchecked") public final void compute() {
6139 +            final ObjectToDouble<? super V> transformer =
6140 +                this.transformer;
6141 +            final DoubleByDoubleToDouble reducer = this.reducer;
6142 +            if (transformer == null || reducer == null)
6143 +                throw new NullPointerException();
6144 +            double r = this.basis;
6145 +            for (int b; (b = preSplit()) > 0;)
6146 +                (rights = new MapReduceValuesToDoubleTask<K,V>
6147 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6148 +            Object v;
6149 +            while ((v = advance()) != null)
6150 +                r = reducer.apply(r, transformer.apply((V)v));
6151 +            result = r;
6152 +            CountedCompleter<?> c;
6153 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6154 +                MapReduceValuesToDoubleTask<K,V>
6155 +                    t = (MapReduceValuesToDoubleTask<K,V>)c,
6156 +                    s = t.rights;
6157 +                while (s != null) {
6158 +                    t.result = reducer.apply(t.result, s.result);
6159 +                    s = t.rights = s.nextRight;
6160 +                }
6161 +            }
6162 +        }
6163 +    }
6164 +
6165 +    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
6166 +        extends Traverser<K,V,Double> {
6167 +        final ObjectToDouble<Map.Entry<K,V>> transformer;
6168 +        final DoubleByDoubleToDouble reducer;
6169 +        final double basis;
6170 +        double result;
6171 +        MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
6172 +        MapReduceEntriesToDoubleTask
6173 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6174 +             MapReduceEntriesToDoubleTask<K,V> nextRight,
6175 +             ObjectToDouble<Map.Entry<K,V>> transformer,
6176 +             double basis,
6177 +             DoubleByDoubleToDouble reducer) {
6178 +            super(m, p, b); this.nextRight = nextRight;
6179 +            this.transformer = transformer;
6180 +            this.basis = basis; this.reducer = reducer;
6181 +        }
6182 +        public final Double getRawResult() { return result; }
6183 +        @SuppressWarnings("unchecked") public final void compute() {
6184 +            final ObjectToDouble<Map.Entry<K,V>> transformer =
6185 +                this.transformer;
6186 +            final DoubleByDoubleToDouble reducer = this.reducer;
6187 +            if (transformer == null || reducer == null)
6188 +                throw new NullPointerException();
6189 +            double r = this.basis;
6190 +            for (int b; (b = preSplit()) > 0;)
6191 +                (rights = new MapReduceEntriesToDoubleTask<K,V>
6192 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6193 +            Object v;
6194 +            while ((v = advance()) != null)
6195 +                r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v)));
6196 +            result = r;
6197 +            CountedCompleter<?> c;
6198 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6199 +                MapReduceEntriesToDoubleTask<K,V>
6200 +                    t = (MapReduceEntriesToDoubleTask<K,V>)c,
6201 +                    s = t.rights;
6202 +                while (s != null) {
6203 +                    t.result = reducer.apply(t.result, s.result);
6204 +                    s = t.rights = s.nextRight;
6205 +                }
6206 +            }
6207 +        }
6208 +    }
6209 +
6210 +    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6211 +        extends Traverser<K,V,Double> {
6212 +        final ObjectByObjectToDouble<? super K, ? super V> transformer;
6213 +        final DoubleByDoubleToDouble reducer;
6214 +        final double basis;
6215 +        double result;
6216 +        MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6217 +        MapReduceMappingsToDoubleTask
6218 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6219 +             MapReduceMappingsToDoubleTask<K,V> nextRight,
6220 +             ObjectByObjectToDouble<? super K, ? super V> transformer,
6221 +             double basis,
6222 +             DoubleByDoubleToDouble reducer) {
6223 +            super(m, p, b); this.nextRight = nextRight;
6224 +            this.transformer = transformer;
6225 +            this.basis = basis; this.reducer = reducer;
6226 +        }
6227 +        public final Double getRawResult() { return result; }
6228 +        @SuppressWarnings("unchecked") public final void compute() {
6229 +            final ObjectByObjectToDouble<? super K, ? super V> transformer =
6230 +                this.transformer;
6231 +            final DoubleByDoubleToDouble reducer = this.reducer;
6232 +            if (transformer == null || reducer == null)
6233 +                throw new NullPointerException();
6234 +            double r = this.basis;
6235 +            for (int b; (b = preSplit()) > 0;)
6236 +                (rights = new MapReduceMappingsToDoubleTask<K,V>
6237 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6238 +            Object v;
6239 +            while ((v = advance()) != null)
6240 +                r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6241 +            result = r;
6242 +            CountedCompleter<?> c;
6243 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6244 +                MapReduceMappingsToDoubleTask<K,V>
6245 +                    t = (MapReduceMappingsToDoubleTask<K,V>)c,
6246 +                    s = t.rights;
6247 +                while (s != null) {
6248 +                    t.result = reducer.apply(t.result, s.result);
6249 +                    s = t.rights = s.nextRight;
6250 +                }
6251 +            }
6252 +        }
6253 +    }
6254 +
6255 +    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6256 +        extends Traverser<K,V,Long> {
6257 +        final ObjectToLong<? super K> transformer;
6258 +        final LongByLongToLong reducer;
6259 +        final long basis;
6260 +        long result;
6261 +        MapReduceKeysToLongTask<K,V> rights, nextRight;
6262 +        MapReduceKeysToLongTask
6263 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6264 +             MapReduceKeysToLongTask<K,V> nextRight,
6265 +             ObjectToLong<? super K> transformer,
6266 +             long basis,
6267 +             LongByLongToLong reducer) {
6268 +            super(m, p, b); this.nextRight = nextRight;
6269 +            this.transformer = transformer;
6270 +            this.basis = basis; this.reducer = reducer;
6271 +        }
6272 +        public final Long getRawResult() { return result; }
6273 +        @SuppressWarnings("unchecked") public final void compute() {
6274 +            final ObjectToLong<? super K> transformer =
6275 +                this.transformer;
6276 +            final LongByLongToLong reducer = this.reducer;
6277 +            if (transformer == null || reducer == null)
6278 +                throw new NullPointerException();
6279 +            long r = this.basis;
6280 +            for (int b; (b = preSplit()) > 0;)
6281 +                (rights = new MapReduceKeysToLongTask<K,V>
6282 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6283 +            while (advance() != null)
6284 +                r = reducer.apply(r, transformer.apply((K)nextKey));
6285 +            result = r;
6286 +            CountedCompleter<?> c;
6287 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6288 +                MapReduceKeysToLongTask<K,V>
6289 +                    t = (MapReduceKeysToLongTask<K,V>)c,
6290 +                    s = t.rights;
6291 +                while (s != null) {
6292 +                    t.result = reducer.apply(t.result, s.result);
6293 +                    s = t.rights = s.nextRight;
6294 +                }
6295 +            }
6296 +        }
6297 +    }
6298 +
6299 +    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6300 +        extends Traverser<K,V,Long> {
6301 +        final ObjectToLong<? super V> transformer;
6302 +        final LongByLongToLong reducer;
6303 +        final long basis;
6304 +        long result;
6305 +        MapReduceValuesToLongTask<K,V> rights, nextRight;
6306 +        MapReduceValuesToLongTask
6307 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6308 +             MapReduceValuesToLongTask<K,V> nextRight,
6309 +             ObjectToLong<? super V> transformer,
6310 +             long basis,
6311 +             LongByLongToLong reducer) {
6312 +            super(m, p, b); this.nextRight = nextRight;
6313 +            this.transformer = transformer;
6314 +            this.basis = basis; this.reducer = reducer;
6315 +        }
6316 +        public final Long getRawResult() { return result; }
6317 +        @SuppressWarnings("unchecked") public final void compute() {
6318 +            final ObjectToLong<? super V> transformer =
6319 +                this.transformer;
6320 +            final LongByLongToLong reducer = this.reducer;
6321 +            if (transformer == null || reducer == null)
6322 +                throw new NullPointerException();
6323 +            long r = this.basis;
6324 +            for (int b; (b = preSplit()) > 0;)
6325 +                (rights = new MapReduceValuesToLongTask<K,V>
6326 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6327 +            Object v;
6328 +            while ((v = advance()) != null)
6329 +                r = reducer.apply(r, transformer.apply((V)v));
6330 +            result = r;
6331 +            CountedCompleter<?> c;
6332 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6333 +                MapReduceValuesToLongTask<K,V>
6334 +                    t = (MapReduceValuesToLongTask<K,V>)c,
6335 +                    s = t.rights;
6336 +                while (s != null) {
6337 +                    t.result = reducer.apply(t.result, s.result);
6338 +                    s = t.rights = s.nextRight;
6339 +                }
6340 +            }
6341 +        }
6342 +    }
6343 +
6344 +    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6345 +        extends Traverser<K,V,Long> {
6346 +        final ObjectToLong<Map.Entry<K,V>> transformer;
6347 +        final LongByLongToLong reducer;
6348 +        final long basis;
6349 +        long result;
6350 +        MapReduceEntriesToLongTask<K,V> rights, nextRight;
6351 +        MapReduceEntriesToLongTask
6352 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6353 +             MapReduceEntriesToLongTask<K,V> nextRight,
6354 +             ObjectToLong<Map.Entry<K,V>> transformer,
6355 +             long basis,
6356 +             LongByLongToLong reducer) {
6357 +            super(m, p, b); this.nextRight = nextRight;
6358 +            this.transformer = transformer;
6359 +            this.basis = basis; this.reducer = reducer;
6360 +        }
6361 +        public final Long getRawResult() { return result; }
6362 +        @SuppressWarnings("unchecked") public final void compute() {
6363 +            final ObjectToLong<Map.Entry<K,V>> transformer =
6364 +                this.transformer;
6365 +            final LongByLongToLong reducer = this.reducer;
6366 +            if (transformer == null || reducer == null)
6367 +                throw new NullPointerException();
6368 +            long r = this.basis;
6369 +            for (int b; (b = preSplit()) > 0;)
6370 +                (rights = new MapReduceEntriesToLongTask<K,V>
6371 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6372 +            Object v;
6373 +            while ((v = advance()) != null)
6374 +                r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v)));
6375 +            result = r;
6376 +            CountedCompleter<?> c;
6377 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6378 +                MapReduceEntriesToLongTask<K,V>
6379 +                    t = (MapReduceEntriesToLongTask<K,V>)c,
6380 +                    s = t.rights;
6381 +                while (s != null) {
6382 +                    t.result = reducer.apply(t.result, s.result);
6383 +                    s = t.rights = s.nextRight;
6384 +                }
6385 +            }
6386 +        }
6387 +    }
6388 +
6389 +    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6390 +        extends Traverser<K,V,Long> {
6391 +        final ObjectByObjectToLong<? super K, ? super V> transformer;
6392 +        final LongByLongToLong reducer;
6393 +        final long basis;
6394 +        long result;
6395 +        MapReduceMappingsToLongTask<K,V> rights, nextRight;
6396 +        MapReduceMappingsToLongTask
6397 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6398 +             MapReduceMappingsToLongTask<K,V> nextRight,
6399 +             ObjectByObjectToLong<? super K, ? super V> transformer,
6400 +             long basis,
6401 +             LongByLongToLong reducer) {
6402 +            super(m, p, b); this.nextRight = nextRight;
6403 +            this.transformer = transformer;
6404 +            this.basis = basis; this.reducer = reducer;
6405 +        }
6406 +        public final Long getRawResult() { return result; }
6407 +        @SuppressWarnings("unchecked") public final void compute() {
6408 +            final ObjectByObjectToLong<? super K, ? super V> transformer =
6409 +                this.transformer;
6410 +            final LongByLongToLong reducer = this.reducer;
6411 +            if (transformer == null || reducer == null)
6412 +                throw new NullPointerException();
6413 +            long r = this.basis;
6414 +            for (int b; (b = preSplit()) > 0;)
6415 +                (rights = new MapReduceMappingsToLongTask<K,V>
6416 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6417 +            Object v;
6418 +            while ((v = advance()) != null)
6419 +                r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6420 +            result = r;
6421 +            CountedCompleter<?> c;
6422 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6423 +                MapReduceMappingsToLongTask<K,V>
6424 +                    t = (MapReduceMappingsToLongTask<K,V>)c,
6425 +                    s = t.rights;
6426 +                while (s != null) {
6427 +                    t.result = reducer.apply(t.result, s.result);
6428 +                    s = t.rights = s.nextRight;
6429 +                }
6430 +            }
6431 +        }
6432 +    }
6433 +
6434 +    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6435 +        extends Traverser<K,V,Integer> {
6436 +        final ObjectToInt<? super K> transformer;
6437 +        final IntByIntToInt reducer;
6438 +        final int basis;
6439 +        int result;
6440 +        MapReduceKeysToIntTask<K,V> rights, nextRight;
6441 +        MapReduceKeysToIntTask
6442 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6443 +             MapReduceKeysToIntTask<K,V> nextRight,
6444 +             ObjectToInt<? super K> transformer,
6445 +             int basis,
6446 +             IntByIntToInt reducer) {
6447 +            super(m, p, b); this.nextRight = nextRight;
6448 +            this.transformer = transformer;
6449 +            this.basis = basis; this.reducer = reducer;
6450 +        }
6451 +        public final Integer getRawResult() { return result; }
6452 +        @SuppressWarnings("unchecked") public final void compute() {
6453 +            final ObjectToInt<? super K> transformer =
6454 +                this.transformer;
6455 +            final IntByIntToInt reducer = this.reducer;
6456 +            if (transformer == null || reducer == null)
6457 +                throw new NullPointerException();
6458 +            int r = this.basis;
6459 +            for (int b; (b = preSplit()) > 0;)
6460 +                (rights = new MapReduceKeysToIntTask<K,V>
6461 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6462 +            while (advance() != null)
6463 +                r = reducer.apply(r, transformer.apply((K)nextKey));
6464 +            result = r;
6465 +            CountedCompleter<?> c;
6466 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6467 +                MapReduceKeysToIntTask<K,V>
6468 +                    t = (MapReduceKeysToIntTask<K,V>)c,
6469 +                    s = t.rights;
6470 +                while (s != null) {
6471 +                    t.result = reducer.apply(t.result, s.result);
6472 +                    s = t.rights = s.nextRight;
6473 +                }
6474 +            }
6475 +        }
6476 +    }
6477 +
6478 +    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6479 +        extends Traverser<K,V,Integer> {
6480 +        final ObjectToInt<? super V> transformer;
6481 +        final IntByIntToInt reducer;
6482 +        final int basis;
6483 +        int result;
6484 +        MapReduceValuesToIntTask<K,V> rights, nextRight;
6485 +        MapReduceValuesToIntTask
6486 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6487 +             MapReduceValuesToIntTask<K,V> nextRight,
6488 +             ObjectToInt<? super V> transformer,
6489 +             int basis,
6490 +             IntByIntToInt reducer) {
6491 +            super(m, p, b); this.nextRight = nextRight;
6492 +            this.transformer = transformer;
6493 +            this.basis = basis; this.reducer = reducer;
6494 +        }
6495 +        public final Integer getRawResult() { return result; }
6496 +        @SuppressWarnings("unchecked") public final void compute() {
6497 +            final ObjectToInt<? super V> transformer =
6498 +                this.transformer;
6499 +            final IntByIntToInt reducer = this.reducer;
6500 +            if (transformer == null || reducer == null)
6501 +                throw new NullPointerException();
6502 +            int r = this.basis;
6503 +            for (int b; (b = preSplit()) > 0;)
6504 +                (rights = new MapReduceValuesToIntTask<K,V>
6505 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6506 +            Object v;
6507 +            while ((v = advance()) != null)
6508 +                r = reducer.apply(r, transformer.apply((V)v));
6509 +            result = r;
6510 +            CountedCompleter<?> c;
6511 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6512 +                MapReduceValuesToIntTask<K,V>
6513 +                    t = (MapReduceValuesToIntTask<K,V>)c,
6514 +                    s = t.rights;
6515 +                while (s != null) {
6516 +                    t.result = reducer.apply(t.result, s.result);
6517 +                    s = t.rights = s.nextRight;
6518 +                }
6519 +            }
6520 +        }
6521 +    }
6522 +
6523 +    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6524 +        extends Traverser<K,V,Integer> {
6525 +        final ObjectToInt<Map.Entry<K,V>> transformer;
6526 +        final IntByIntToInt reducer;
6527 +        final int basis;
6528 +        int result;
6529 +        MapReduceEntriesToIntTask<K,V> rights, nextRight;
6530 +        MapReduceEntriesToIntTask
6531 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6532 +             MapReduceEntriesToIntTask<K,V> nextRight,
6533 +             ObjectToInt<Map.Entry<K,V>> transformer,
6534 +             int basis,
6535 +             IntByIntToInt reducer) {
6536 +            super(m, p, b); this.nextRight = nextRight;
6537 +            this.transformer = transformer;
6538 +            this.basis = basis; this.reducer = reducer;
6539 +        }
6540 +        public final Integer getRawResult() { return result; }
6541 +        @SuppressWarnings("unchecked") public final void compute() {
6542 +            final ObjectToInt<Map.Entry<K,V>> transformer =
6543 +                this.transformer;
6544 +            final IntByIntToInt reducer = this.reducer;
6545 +            if (transformer == null || reducer == null)
6546 +                throw new NullPointerException();
6547 +            int r = this.basis;
6548 +            for (int b; (b = preSplit()) > 0;)
6549 +                (rights = new MapReduceEntriesToIntTask<K,V>
6550 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6551 +            Object v;
6552 +            while ((v = advance()) != null)
6553 +                r = reducer.apply(r, transformer.apply(entryFor((K)nextKey, (V)v)));
6554 +            result = r;
6555 +            CountedCompleter<?> c;
6556 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6557 +                MapReduceEntriesToIntTask<K,V>
6558 +                    t = (MapReduceEntriesToIntTask<K,V>)c,
6559 +                    s = t.rights;
6560 +                while (s != null) {
6561 +                    t.result = reducer.apply(t.result, s.result);
6562 +                    s = t.rights = s.nextRight;
6563 +                }
6564 +            }
6565 +        }
6566 +    }
6567 +
6568 +    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6569 +        extends Traverser<K,V,Integer> {
6570 +        final ObjectByObjectToInt<? super K, ? super V> transformer;
6571 +        final IntByIntToInt reducer;
6572 +        final int basis;
6573 +        int result;
6574 +        MapReduceMappingsToIntTask<K,V> rights, nextRight;
6575 +        MapReduceMappingsToIntTask
6576 +            (ConcurrentHashMapV8<K,V> m, Traverser<K,V,?> p, int b,
6577 +             MapReduceMappingsToIntTask<K,V> nextRight,
6578 +             ObjectByObjectToInt<? super K, ? super V> transformer,
6579 +             int basis,
6580 +             IntByIntToInt reducer) {
6581 +            super(m, p, b); this.nextRight = nextRight;
6582 +            this.transformer = transformer;
6583 +            this.basis = basis; this.reducer = reducer;
6584 +        }
6585 +        public final Integer getRawResult() { return result; }
6586 +        @SuppressWarnings("unchecked") public final void compute() {
6587 +            final ObjectByObjectToInt<? super K, ? super V> transformer =
6588 +                this.transformer;
6589 +            final IntByIntToInt reducer = this.reducer;
6590 +            if (transformer == null || reducer == null)
6591 +                throw new NullPointerException();
6592 +            int r = this.basis;
6593 +            for (int b; (b = preSplit()) > 0;)
6594 +                (rights = new MapReduceMappingsToIntTask<K,V>
6595 +                 (map, this, b, rights, transformer, r, reducer)).fork();
6596 +            Object v;
6597 +            while ((v = advance()) != null)
6598 +                r = reducer.apply(r, transformer.apply((K)nextKey, (V)v));
6599 +            result = r;
6600 +            CountedCompleter<?> c;
6601 +            for (c = firstComplete(); c != null; c = c.nextComplete()) {
6602 +                MapReduceMappingsToIntTask<K,V>
6603 +                    t = (MapReduceMappingsToIntTask<K,V>)c,
6604 +                    s = t.rights;
6605 +                while (s != null) {
6606 +                    t.result = reducer.apply(t.result, s.result);
6607 +                    s = t.rights = s.nextRight;
6608 +                }
6609 +            }
6610 +        }
6611 +    }
6612 +
6613      // Unsafe mechanics
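          // counterOffset and sizeCtlOffset are raw field offsets used for
          // Unsafe-based atomic access to the counter and sizeCtl fields;
          // ABASE/ASHIFT locate individual Node[] table slots the same way.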
6614      private static final sun.misc.Unsafe UNSAFE;
6615      private static final long counterOffset;
6616 <    private static final long resizingOffset;
6616 >    private static final long sizeCtlOffset;
6617      private static final long ABASE;
6618      private static final int ASHIFT;
6619  
# Line 1695 | Line 6624 | public class ConcurrentHashMapV8<K, V>
6624              Class<?> k = ConcurrentHashMapV8.class;
6625              counterOffset = UNSAFE.objectFieldOffset
6626                  (k.getDeclaredField("counter"));
6627 <            resizingOffset = UNSAFE.objectFieldOffset
6628 <                (k.getDeclaredField("resizing"));
6627 >            sizeCtlOffset = UNSAFE.objectFieldOffset
6628 >                (k.getDeclaredField("sizeCtl"));
6629              Class<?> sc = Node[].class;
6630              ABASE = UNSAFE.arrayBaseOffset(sc);
6631              ss = UNSAFE.arrayIndexScale(sc);
# Line 1735 | Line 6664 | public class ConcurrentHashMapV8<K, V>
6664              }
6665          }
6666      }
1738
6667   }
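A rough sketch of how the function types used by these tasks fit together. The exact declarations of ObjectToInt and IntByIntToInt sit elsewhere in this file and are not shown in this hunk; their single-method shape below is inferred from the apply(...) calls above, and the surrounding wiring is an assumption, not code from the file:

    // Shapes inferred from usage above: the transformer maps an element to
    // an int, the reducer folds two ints.  Both declarations here are
    // assumptions about types defined elsewhere in this file.
    ObjectToInt<String> keyLength = new ObjectToInt<String>() {
        public int apply(String k) { return k.length(); }
    };
    IntByIntToInt sum = new IntByIntToInt() {
        public int apply(int a, int b) { return a + b; }
    };
    // With a basis of 0, these would be handed to the map's bulk reduce
    // machinery, which ultimately runs a MapReduceKeysToIntTask over the
    // key set and returns the total length of all keys.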

Diff Legend

  Removed lines (shown with the old line number only)
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)