ViewVC Help
View File | Revision Log | Show Annotations | Download File | Root Listing
root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.155
Committed: Thu Dec 27 20:47:47 2012 UTC (11 years, 5 months ago) by dl
Branch: MAIN
Changes since 1.154: +9 -13 lines
Log Message:
Track lambda-lib API changes

File Contents

# Content
1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.util.concurrent.ForkJoinPool;
9 import java.util.concurrent.CountedCompleter;
10 import java.util.function.*;
11 import java.util.stream.Spliterator;
12 import java.util.stream.Stream;
13 import java.util.stream.Streams;
14
15 import java.util.Comparator;
16 import java.util.Arrays;
17 import java.util.Map;
18 import java.util.Set;
19 import java.util.Collection;
20 import java.util.AbstractMap;
21 import java.util.AbstractSet;
22 import java.util.AbstractCollection;
23 import java.util.Hashtable;
24 import java.util.HashMap;
25 import java.util.Iterator;
26 import java.util.Enumeration;
27 import java.util.ConcurrentModificationException;
28 import java.util.NoSuchElementException;
29 import java.util.concurrent.ConcurrentMap;
30 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
31 import java.util.concurrent.atomic.AtomicInteger;
32 import java.util.concurrent.atomic.AtomicReference;
33 import java.io.Serializable;
34
35 /**
36 * A hash table supporting full concurrency of retrievals and
37 * high expected concurrency for updates. This class obeys the
38 * same functional specification as {@link java.util.Hashtable}, and
39 * includes versions of methods corresponding to each method of
40 * {@code Hashtable}. However, even though all operations are
41 * thread-safe, retrieval operations do <em>not</em> entail locking,
42 * and there is <em>not</em> any support for locking the entire table
43 * in a way that prevents all access. This class is fully
44 * interoperable with {@code Hashtable} in programs that rely on its
45 * thread safety but not on its synchronization details.
46 *
47 * <p>Retrieval operations (including {@code get}) generally do not
48 * block, so may overlap with update operations (including {@code put}
49 * and {@code remove}). Retrievals reflect the results of the most
50 * recently <em>completed</em> update operations holding upon their
51 * onset. (More formally, an update operation for a given key bears a
52 * <em>happens-before</em> relation with any (non-null) retrieval for
53 * that key reporting the updated value.) For aggregate operations
54 * such as {@code putAll} and {@code clear}, concurrent retrievals may
55 * reflect insertion or removal of only some entries. Similarly,
56 * Iterators and Enumerations return elements reflecting the state of
57 * the hash table at some point at or since the creation of the
58 * iterator/enumeration. They do <em>not</em> throw {@link
59 * ConcurrentModificationException}. However, iterators are designed
60 * to be used by only one thread at a time. Bear in mind that the
61 * results of aggregate status methods including {@code size}, {@code
62 * isEmpty}, and {@code containsValue} are typically useful only when
63 * a map is not undergoing concurrent updates in other threads.
64 * Otherwise the results of these methods reflect transient states
65 * that may be adequate for monitoring or estimation purposes, but not
66 * for program control.
67 *
68 * <p>The table is dynamically expanded when there are too many
69 * collisions (i.e., keys that have distinct hash codes but fall into
70 * the same slot modulo the table size), with the expected average
71 * effect of maintaining roughly two bins per mapping (corresponding
72 * to a 0.75 load factor threshold for resizing). There may be much
73 * variance around this average as mappings are added and removed, but
74 * overall, this maintains a commonly accepted time/space tradeoff for
75 * hash tables. However, resizing this or any other kind of hash
76 * table may be a relatively slow operation. When possible, it is a
77 * good idea to provide a size estimate as an optional {@code
78 * initialCapacity} constructor argument. An additional optional
79 * {@code loadFactor} constructor argument provides a further means of
80 * customizing initial table capacity by specifying the table density
81 * to be used in calculating the amount of space to allocate for the
82 * given number of elements. Also, for compatibility with previous
83 * versions of this class, constructors may optionally specify an
84 * expected {@code concurrencyLevel} as an additional hint for
85 * internal sizing. Note that using many keys with exactly the same
86 * {@code hashCode()} is a sure way to slow down performance of any
87 * hash table.
88 *
89 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
90 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
 91 * (using {@link #keySet(Object)}) when only keys are of interest, and the
92 * mapped values are (perhaps transiently) not used or all take the
93 * same mapping value.
94 *
 95 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
96 * form of histogram or multiset) by using {@link
97 * java.util.concurrent.atomic.LongAdder} values and initializing via
98 * {@link #computeIfAbsent}. For example, to add a count to a {@code
99 * ConcurrentHashMap<String,LongAdder> freqs}, you can use {@code
 100 * freqs.computeIfAbsent(k, k -> new LongAdder()).increment();}
101 *
102 * <p>This class and its views and iterators implement all of the
103 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
104 * interfaces.
105 *
106 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
107 * does <em>not</em> allow {@code null} to be used as a key or value.
108 *
 109 * <p>ConcurrentHashMaps support a set of sequential and parallel
 110 * bulk operations. (Parallel forms use the {@link
111 * ForkJoinPool#commonPool()}). Tasks that may be used in other
112 * contexts are available in class {@link ForkJoinTasks}. These
113 * operations are designed to be safely, and often sensibly, applied
114 * even with maps that are being concurrently updated by other
115 * threads; for example, when computing a snapshot summary of the
116 * values in a shared registry. There are three kinds of operation,
117 * each with four forms, accepting functions with Keys, Values,
118 * Entries, and (Key, Value) arguments and/or return values. Because
119 * the elements of a ConcurrentHashMap are not ordered in any
120 * particular way, and may be processed in different orders in
121 * different parallel executions, the correctness of supplied
122 * functions should not depend on any ordering, or on any other
123 * objects or values that may transiently change while computation is
124 * in progress; and except for forEach actions, should ideally be
125 * side-effect-free.
126 *
127 * <ul>
128 * <li> forEach: Perform a given action on each element.
129 * A variant form applies a given transformation on each element
130 * before performing the action.</li>
131 *
132 * <li> search: Return the first available non-null result of
133 * applying a given function on each element; skipping further
134 * search when a result is found.</li>
135 *
136 * <li> reduce: Accumulate each element. The supplied reduction
137 * function cannot rely on ordering (more formally, it should be
138 * both associative and commutative). There are five variants:
139 *
140 * <ul>
141 *
142 * <li> Plain reductions. (There is not a form of this method for
143 * (key, value) function arguments since there is no corresponding
144 * return type.)</li>
145 *
146 * <li> Mapped reductions that accumulate the results of a given
147 * function applied to each element.</li>
148 *
149 * <li> Reductions to scalar doubles, longs, and ints, using a
150 * given basis value.</li>
151 *
 152 *
153 * </ul>
154 * </ul>
155 *
156 * <p>The concurrency properties of bulk operations follow
157 * from those of ConcurrentHashMap: Any non-null result returned
158 * from {@code get(key)} and related access methods bears a
159 * happens-before relation with the associated insertion or
160 * update. The result of any bulk operation reflects the
161 * composition of these per-element relations (but is not
162 * necessarily atomic with respect to the map as a whole unless it
163 * is somehow known to be quiescent). Conversely, because keys
164 * and values in the map are never null, null serves as a reliable
165 * atomic indicator of the current lack of any result. To
166 * maintain this property, null serves as an implicit basis for
167 * all non-scalar reduction operations. For the double, long, and
168 * int versions, the basis should be one that, when combined with
169 * any other value, returns that other value (more formally, it
170 * should be the identity element for the reduction). Most common
171 * reductions have these properties; for example, computing a sum
172 * with basis 0 or a minimum with basis MAX_VALUE.
173 *
174 * <p>Search and transformation functions provided as arguments
175 * should similarly return null to indicate the lack of any result
176 * (in which case it is not used). In the case of mapped
177 * reductions, this also enables transformations to serve as
178 * filters, returning null (or, in the case of primitive
179 * specializations, the identity basis) if the element should not
180 * be combined. You can create compound transformations and
181 * filterings by composing them yourself under this "null means
182 * there is nothing there now" rule before using them in search or
183 * reduce operations.
184 *
185 * <p>Methods accepting and/or returning Entry arguments maintain
186 * key-value associations. They may be useful for example when
187 * finding the key for the greatest value. Note that "plain" Entry
188 * arguments can be supplied using {@code new
189 * AbstractMap.SimpleEntry(k,v)}.
190 *
191 * <p>Bulk operations may complete abruptly, throwing an
192 * exception encountered in the application of a supplied
193 * function. Bear in mind when handling such exceptions that other
194 * concurrently executing functions could also have thrown
195 * exceptions, or would have done so if the first exception had
196 * not occurred.
197 *
198 * <p>Speedups for parallel compared to sequential forms are common
199 * but not guaranteed. Parallel operations involving brief functions
200 * on small maps may execute more slowly than sequential forms if the
201 * underlying work to parallelize the computation is more expensive
202 * than the computation itself. Similarly, parallelization may not
203 * lead to much actual parallelism if all processors are busy
204 * performing unrelated tasks.
205 *
206 * <p>All arguments to all task methods must be non-null.
207 *
208 * <p>This class is a member of the
209 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
210 * Java Collections Framework</a>.
211 *
212 * @since 1.5
213 * @author Doug Lea
214 * @param <K> the type of keys maintained by this map
215 * @param <V> the type of mapped values
216 */
217 public class ConcurrentHashMap<K, V>
218 implements ConcurrentMap<K, V>, Serializable {
219 private static final long serialVersionUID = 7249069246763182397L;
220
221 /*
222 * Overview:
223 *
224 * The primary design goal of this hash table is to maintain
225 * concurrent readability (typically method get(), but also
226 * iterators and related methods) while minimizing update
227 * contention. Secondary goals are to keep space consumption about
228 * the same or better than java.util.HashMap, and to support high
229 * initial insertion rates on an empty table by many threads.
230 *
231 * Each key-value mapping is held in a Node. Because Node key
232 * fields can contain special values, they are defined using plain
233 * Object types (not type "K"). This leads to a lot of explicit
234 * casting (and many explicit warning suppressions to tell
235 * compilers not to complain about it). It also allows some of the
236 * public methods to be factored into a smaller number of internal
237 * methods (although sadly not so for the five variants of
238 * put-related operations). The validation-based approach
239 * explained below leads to a lot of code sprawl because
240 * retry-control precludes factoring into smaller methods.
241 *
242 * The table is lazily initialized to a power-of-two size upon the
243 * first insertion. Each bin in the table normally contains a
244 * list of Nodes (most often, the list has only zero or one Node).
245 * Table accesses require volatile/atomic reads, writes, and
246 * CASes. Because there is no other way to arrange this without
247 * adding further indirections, we use intrinsics
248 * (sun.misc.Unsafe) operations. The lists of nodes within bins
249 * are always accurately traversable under volatile reads, so long
250 * as lookups check hash code and non-nullness of value before
251 * checking key equality.
252 *
253 * We use the top (sign) bit of Node hash fields for control
254 * purposes -- it is available anyway because of addressing
255 * constraints. Nodes with negative hash fields are forwarding
256 * nodes to either TreeBins or resized tables. The lower 31 bits
257 * of each normal Node's hash field contain a transformation of
258 * the key's hash code.
259 *
260 * Insertion (via put or its variants) of the first node in an
261 * empty bin is performed by just CASing it to the bin. This is
262 * by far the most common case for put operations under most
263 * key/hash distributions. Other update operations (insert,
264 * delete, and replace) require locks. We do not want to waste
265 * the space required to associate a distinct lock object with
266 * each bin, so instead use the first node of a bin list itself as
267 * a lock. Locking support for these locks relies on builtin
268 * "synchronized" monitors.
269 *
270 * Using the first node of a list as a lock does not by itself
271 * suffice though: When a node is locked, any update must first
272 * validate that it is still the first node after locking it, and
273 * retry if not. Because new nodes are always appended to lists,
274 * once a node is first in a bin, it remains first until deleted
275 * or the bin becomes invalidated (upon resizing). However,
276 * operations that only conditionally update may inspect nodes
277 * until the point of update. This is a converse of sorts to the
278 * lazy locking technique described by Herlihy & Shavit.
279 *
280 * The main disadvantage of per-bin locks is that other update
281 * operations on other nodes in a bin list protected by the same
282 * lock can stall, for example when user equals() or mapping
283 * functions take a long time. However, statistically, under
284 * random hash codes, this is not a common problem. Ideally, the
285 * frequency of nodes in bins follows a Poisson distribution
286 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
287 * parameter of about 0.5 on average, given the resizing threshold
288 * of 0.75, although with a large variance because of resizing
289 * granularity. Ignoring variance, the expected occurrences of
290 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
291 * first values are:
292 *
293 * 0: 0.60653066
294 * 1: 0.30326533
295 * 2: 0.07581633
296 * 3: 0.01263606
297 * 4: 0.00157952
298 * 5: 0.00015795
299 * 6: 0.00001316
300 * 7: 0.00000094
301 * 8: 0.00000006
302 * more: less than 1 in ten million
303 *
304 * Lock contention probability for two threads accessing distinct
305 * elements is roughly 1 / (8 * #elements) under random hashes.
306 *
307 * Actual hash code distributions encountered in practice
308 * sometimes deviate significantly from uniform randomness. This
309 * includes the case when N > (1<<30), so some keys MUST collide.
310 * Similarly for dumb or hostile usages in which multiple keys are
311 * designed to have identical hash codes. Also, although we guard
312 * against the worst effects of this (see method spread), sets of
313 * hashes may differ only in bits that do not impact their bin
314 * index for a given power-of-two mask. So we use a secondary
315 * strategy that applies when the number of nodes in a bin exceeds
316 * a threshold, and at least one of the keys implements
317 * Comparable. These TreeBins use a balanced tree to hold nodes
318 * (a specialized form of red-black trees), bounding search time
319 * to O(log N). Each search step in a TreeBin is around twice as
320 * slow as in a regular list, but given that N cannot exceed
321 * (1<<64) (before running out of addresses) this bounds search
322 * steps, lock hold times, etc, to reasonable constants (roughly
323 * 100 nodes inspected per operation worst case) so long as keys
324 * are Comparable (which is very common -- String, Long, etc).
325 * TreeBin nodes (TreeNodes) also maintain the same "next"
326 * traversal pointers as regular nodes, so can be traversed in
327 * iterators in the same way.
328 *
329 * The table is resized when occupancy exceeds a percentage
330 * threshold (nominally, 0.75, but see below). Any thread
331 * noticing an overfull bin may assist in resizing after the
332 * initiating thread allocates and sets up the replacement
333 * array. However, rather than stalling, these other threads may
334 * proceed with insertions etc. The use of TreeBins shields us
335 * from the worst case effects of overfilling while resizes are in
336 * progress. Resizing proceeds by transferring bins, one by one,
337 * from the table to the next table. To enable concurrency, the
338 * next table must be (incrementally) prefilled with place-holders
339 * serving as reverse forwarders to the old table. Because we are
340 * using power-of-two expansion, the elements from each bin must
341 * either stay at same index, or move with a power of two
342 * offset. We eliminate unnecessary node creation by catching
343 * cases where old nodes can be reused because their next fields
344 * won't change. On average, only about one-sixth of them need
345 * cloning when a table doubles. The nodes they replace will be
346 * garbage collectable as soon as they are no longer referenced by
347 * any reader thread that may be in the midst of concurrently
348 * traversing table. Upon transfer, the old table bin contains
349 * only a special forwarding node (with hash field "MOVED") that
350 * contains the next table as its key. On encountering a
351 * forwarding node, access and update operations restart, using
352 * the new table.
353 *
354 * Each bin transfer requires its bin lock, which can stall
355 * waiting for locks while resizing. However, because other
356 * threads can join in and help resize rather than contend for
357 * locks, average aggregate waits become shorter as resizing
358 * progresses. The transfer operation must also ensure that all
359 * accessible bins in both the old and new table are usable by any
360 * traversal. This is arranged by proceeding from the last bin
361 * (table.length - 1) up towards the first. Upon seeing a
362 * forwarding node, traversals (see class Traverser) arrange to
363 * move to the new table without revisiting nodes. However, to
364 * ensure that no intervening nodes are skipped, bin splitting can
365 * only begin after the associated reverse-forwarders are in
366 * place.
367 *
368 * The traversal scheme also applies to partial traversals of
369 * ranges of bins (via an alternate Traverser constructor)
370 * to support partitioned aggregate operations. Also, read-only
371 * operations give up if ever forwarded to a null table, which
372 * provides support for shutdown-style clearing, which is also not
373 * currently implemented.
374 *
375 * Lazy table initialization minimizes footprint until first use,
376 * and also avoids resizings when the first operation is from a
377 * putAll, constructor with map argument, or deserialization.
378 * These cases attempt to override the initial capacity settings,
379 * but harmlessly fail to take effect in cases of races.
380 *
381 * The element count is maintained using a specialization of
382 * LongAdder. We need to incorporate a specialization rather than
383 * just use a LongAdder in order to access implicit
384 * contention-sensing that leads to creation of multiple
385 * Cells. The counter mechanics avoid contention on
386 * updates but can encounter cache thrashing if read too
387 * frequently during concurrent access. To avoid reading so often,
388 * resizing under contention is attempted only upon adding to a
389 * bin already holding two or more nodes. Under uniform hash
390 * distributions, the probability of this occurring at threshold
391 * is around 13%, meaning that only about 1 in 8 puts check
392 * threshold (and after resizing, many fewer do so). The bulk
393 * putAll operation further reduces contention by only committing
394 * count updates upon these size checks.
395 *
396 * Maintaining API and serialization compatibility with previous
397 * versions of this class introduces several oddities. Mainly: We
 398 * leave untouched but unused constructor arguments referring to
399 * concurrencyLevel. We accept a loadFactor constructor argument,
400 * but apply it only to initial table capacity (which is the only
401 * time that we can guarantee to honor it.) We also declare an
402 * unused "Segment" class that is instantiated in minimal form
403 * only when serializing.
404 */
405
    /* ---------------- Constants -------------- */

    /**
     * The largest possible table capacity.  This value must be
     * exactly 1<<30 to stay within Java array allocation and indexing
     * bounds for power of two table sizes, and is further required
     * because the top two bits of 32bit hash fields are used for
     * control purposes (the sign bit marks forwarding nodes; see
     * MOVED below).
     */
    private static final int MAXIMUM_CAPACITY = 1 << 30;

    /**
     * The default initial table capacity.  Must be a power of 2
     * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
     */
    private static final int DEFAULT_CAPACITY = 16;

    /**
     * The largest possible (non-power of two) array size.
     * Needed by toArray and related methods.
     */
    static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    /**
     * The default concurrency level for this table. Unused but
     * defined for compatibility with previous versions of this class.
     */
    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;

    /**
     * The load factor for this table. Overrides of this value in
     * constructors affect only the initial table capacity.  The
     * actual floating point value isn't normally used -- it is
     * simpler to use expressions such as {@code n - (n >>> 2)} for
     * the associated resizing threshold (n - n/4 == 0.75 * n).
     */
    private static final float LOAD_FACTOR = 0.75f;

    /**
     * The bin count threshold for using a tree rather than list for a
     * bin.  The value reflects the approximate break-even point for
     * using tree-based operations (see the "secondary strategy"
     * discussion in the Overview above).
     */
    private static final int TREE_THRESHOLD = 8;

    /**
     * Minimum number of rebinnings per transfer step. Ranges are
     * subdivided to allow multiple resizer threads.  This value
     * serves as a lower bound to avoid resizers encountering
     * excessive memory contention.  The value should be at least
     * DEFAULT_CAPACITY.
     */
    private static final int MIN_TRANSFER_STRIDE = 16;

    /*
     * Encodings for Node hash fields. See above for explanation.
     * MOVED has only the sign bit set; normal hashes are masked with
     * HASH_BITS and are therefore always non-negative.
     */
    static final int MOVED = 0x80000000; // hash field for forwarding nodes
    static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash

    /** Number of CPUS, to place bounds on some sizings */
    static final int NCPU = Runtime.getRuntime().availableProcessors();
468
    /* ---------------- Counters -------------- */

    // Adapted from LongAdder and Striped64.
    // See their internal docs for explanation.

    /**
     * A padded cell for distributing counts.  The otherwise-unused
     * p0..p6 and q0..q6 fields pad {@code value} on both sides to
     * reduce cache-line contention (false sharing) between cells
     * updated by different threads.
     * NOTE(review): {@code value} is presumably updated via Unsafe
     * field-offset CAS elsewhere in this class, so neither the field
     * name nor the layout should be changed — verify before editing.
     */
    static final class Cell {
        volatile long p0, p1, p2, p3, p4, p5, p6;
        volatile long value; // this cell's contribution to the element count
        volatile long q0, q1, q2, q3, q4, q5, q6;
        Cell(long x) { value = x; }
    }
481
    /**
     * Holder for the thread-local hash code determining which
     * Cell to use. The code is initialized via the
     * cellHashCodeGenerator, but may be moved upon collisions.
     */
    static final class CellHashCode {
        int code; // per-thread probe value; 0 (Java default) until seeded
    }
490
    /**
     * Generates initial value for per-thread CellHashCodes.
     */
    static final AtomicInteger cellHashCodeGenerator = new AtomicInteger();

    /**
     * Increment for cellHashCodeGenerator. See class ThreadLocal
     * for explanation.  (This is the same golden-ratio-derived
     * constant ThreadLocal uses to spread successive seeds across a
     * power-of-two table.)
     */
    static final int SEED_INCREMENT = 0x61c88647;

    /**
     * Per-thread counter hash codes. Shared across all instances
     * (static, so one probe value per thread regardless of how many
     * maps the thread touches).
     */
    static final ThreadLocal<CellHashCode> threadCellHashCode =
        new ThreadLocal<CellHashCode>();
507
    /* ---------------- Fields -------------- */

    /**
     * The array of bins. Lazily initialized upon first insertion.
     * Size is always a power of two. Accessed directly by iterators.
     */
    transient volatile Node<V>[] table;

    /**
     * The next table to use; non-null only while resizing.
     */
    private transient volatile Node<V>[] nextTable;

    /**
     * Base counter value, used mainly when there is no contention,
     * but also as a fallback during table initialization
     * races. Updated via CAS.
     */
    private transient volatile long baseCount;

    /**
     * Table initialization and resizing control.  When negative, the
     * table is being initialized or resized: -1 for initialization,
     * else -(1 + the number of active resizing threads).  Otherwise,
     * when table is null, holds the initial table size to use upon
     * creation, or 0 for default. After initialization, holds the
     * next element count value upon which to resize the table.
     */
    private transient volatile int sizeCtl;

    /**
     * The next table index (plus one) to split while resizing.
     */
    private transient volatile int transferIndex;

    /**
     * The least available table index to split while resizing.
     */
    private transient volatile int transferOrigin;

    /**
     * Spinlock (locked via CAS) used when resizing and/or creating Cells.
     */
    private transient volatile int cellsBusy;

    /**
     * Table of counter cells. When non-null, size is a power of 2.
     */
    private transient volatile Cell[] counterCells;

    // views (key/value/entry collection views of this map;
    // NOTE(review): creation sites are outside this chunk)
    private transient KeySetView<K,V> keySet;
    private transient ValuesView<K,V> values;
    private transient EntrySetView<K,V> entrySet;

    /** For serialization compatibility. Null unless serialized; see below */
    private Segment<K,V>[] segments;
565
566 /* ---------------- Table element access -------------- */
567
568 /*
569 * Volatile access methods are used for table elements as well as
570 * elements of in-progress next table while resizing. Uses are
571 * null checked by callers, and implicitly bounds-checked, relying
572 * on the invariants that tab arrays have non-zero size, and all
573 * indices are masked with (tab.length - 1) which is never
574 * negative and always less than length. Note that, to be correct
575 * wrt arbitrary concurrency errors by users, bounds checks must
576 * operate on local variables, which accounts for some odd-looking
577 * inline assignments below.
578 */
579
580 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
581 (Node<V>[] tab, int i) { // used by Traverser
582 return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
583 }
584
585 private static final <V> boolean casTabAt
586 (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
587 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
588 }
589
590 private static final <V> void setTabAt
591 (Node<V>[] tab, int i, Node<V> v) {
592 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
593 }
594
595 /* ---------------- Nodes -------------- */
596
597 /**
598 * Key-value entry. Note that this is never exported out as a
599 * user-visible Map.Entry (see MapEntry below). Nodes with a hash
600 * field of MOVED are special, and do not contain user keys or
601 * values. Otherwise, keys are never null, and null val fields
602 * indicate that a node is in the process of being deleted or
603 * created. For purposes of read-only access, a key may be read
604 * before a val, but can only be used after checking val to be
605 * non-null.
606 */
607 static class Node<V> {
608 final int hash;
609 final Object key;
610 volatile V val;
611 volatile Node<V> next;
612
613 Node(int hash, Object key, V val, Node<V> next) {
614 this.hash = hash;
615 this.key = key;
616 this.val = val;
617 this.next = next;
618 }
619 }
620
621 /* ---------------- TreeBins -------------- */
622
623 /**
624 * Nodes for use in TreeBins
625 */
626 static final class TreeNode<V> extends Node<V> {
627 TreeNode<V> parent; // red-black tree links
628 TreeNode<V> left;
629 TreeNode<V> right;
630 TreeNode<V> prev; // needed to unlink next upon deletion
631 boolean red;
632
633 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
634 super(hash, key, val, next);
635 this.parent = parent;
636 }
637 }
638
639 /**
640 * A specialized form of red-black tree for use in bins
641 * whose size exceeds a threshold.
642 *
643 * TreeBins use a special form of comparison for search and
644 * related operations (which is the main reason we cannot use
645 * existing collections such as TreeMaps). TreeBins contain
646 * Comparable elements, but may contain others, as well as
647 * elements that are Comparable but not necessarily Comparable<T>
648 * for the same T, so we cannot invoke compareTo among them. To
649 * handle this, the tree is ordered primarily by hash value, then
650 * by getClass().getName() order, and then by Comparator order
651 * among elements of the same class. On lookup at a node, if
652 * elements are not comparable or compare as 0, both left and
653 * right children may need to be searched in the case of tied hash
654 * values. (This corresponds to the full list search that would be
655 * necessary if all elements were non-Comparable and had tied
656 * hashes.) The red-black balancing code is updated from
657 * pre-jdk-collections
658 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
659 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
660 * Algorithms" (CLR).
661 *
662 * TreeBins also maintain a separate locking discipline than
663 * regular bins. Because they are forwarded via special MOVED
664 * nodes at bin heads (which can never change once established),
665 * we cannot use those nodes as locks. Instead, TreeBin
666 * extends AbstractQueuedSynchronizer to support a simple form of
667 * read-write lock. For update operations and table validation,
668 * the exclusive form of lock behaves in the same way as bin-head
669 * locks. However, lookups use shared read-lock mechanics to allow
670 * multiple readers in the absence of writers. Additionally,
671 * these lookups do not ever block: While the lock is not
672 * available, they proceed along the slow traversal path (via
673 * next-pointers) until the lock becomes available or the list is
674 * exhausted, whichever comes first. (These cases are not fast,
675 * but maximize aggregate expected throughput.) The AQS mechanics
676 * for doing this are straightforward. The lock state is held as
677 * AQS getState(). Read counts are negative; the write count (1)
678 * is positive. There are no signalling preferences among readers
679 * and writers. Since we don't need to export full Lock API, we
680 * just override the minimal AQS methods and use them directly.
681 */
    static final class TreeBin<V> extends AbstractQueuedSynchronizer {
        private static final long serialVersionUID = 2249069246763182397L;
        transient TreeNode<V> root; // root of tree
        transient TreeNode<V> first; // head of next-pointer list

        /* AQS overrides */
        // Lock-state encoding (AQS getState()): 0 = unlocked,
        // 1 = write-locked, negative = number of active readers.
        public final boolean isHeldExclusively() { return getState() > 0; }
        public final boolean tryAcquire(int ignore) {
            if (compareAndSetState(0, 1)) {
                setExclusiveOwnerThread(Thread.currentThread());
                return true;
            }
            return false;
        }
        public final boolean tryRelease(int ignore) {
            setExclusiveOwnerThread(null);
            setState(0);
            return true;
        }
        // A reader may enter whenever no writer holds the lock; each
        // reader decrements state further below zero.
        public final int tryAcquireShared(int ignore) {
            for (int c;;) {
                if ((c = getState()) > 0)
                    return -1;
                if (compareAndSetState(c, c -1))
                    return 1;
            }
        }
        // Signals release (returns true) only when the last reader exits
        // (state transitions -1 -> 0), allowing a blocked writer to proceed.
        public final boolean tryReleaseShared(int ignore) {
            int c;
            do {} while (!compareAndSetState(c = getState(), c + 1));
            return c == -1;
        }

        /** From CLR */
        private void rotateLeft(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> r = p.right, pp, rl;
                if ((rl = p.right = r.left) != null)
                    rl.parent = p;
                if ((pp = r.parent = p.parent) == null)
                    root = r;
                else if (pp.left == p)
                    pp.left = r;
                else
                    pp.right = r;
                r.left = p;
                p.parent = r;
            }
        }

        /** From CLR */
        private void rotateRight(TreeNode<V> p) {
            if (p != null) {
                TreeNode<V> l = p.left, pp, lr;
                if ((lr = p.left = l.right) != null)
                    lr.parent = p;
                if ((pp = l.parent = p.parent) == null)
                    root = l;
                else if (pp.right == p)
                    pp.right = l;
                else
                    pp.left = l;
                l.right = p;
                p.parent = l;
            }
        }

        /**
         * Returns the TreeNode (or null if not found) for the given key
         * starting at given root.
         *
         * Ordering is: hash value first, then Comparable order among keys
         * of the same class, then class-name order; on ties both subtrees
         * may need to be searched (see class comment above).
         */
        @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
            (int h, Object k, TreeNode<V> p) {
            Class<?> c = k.getClass();
            while (p != null) {
                int dir, ph; Object pk; Class<?> pc;
                if ((ph = p.hash) == h) {
                    if ((pk = p.key) == k || k.equals(pk))
                        return p;
                    if (c != (pc = pk.getClass()) ||
                        !(k instanceof Comparable) ||
                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
                        if ((dir = (c == pc) ? 0 :
                             c.getName().compareTo(pc.getName())) == 0) {
                            TreeNode<V> r = null, pl, pr; // check both sides
                            if ((pr = p.right) != null && h >= pr.hash &&
                                (r = getTreeNode(h, k, pr)) != null)
                                return r;
                            else if ((pl = p.left) != null && h <= pl.hash)
                                dir = -1;
                            else // nothing there
                                return null;
                        }
                    }
                }
                else
                    dir = (h < ph) ? -1 : 1;
                p = (dir > 0) ? p.right : p.left;
            }
            return null;
        }

        /**
         * Wrapper for getTreeNode used by CHM.get. Tries to obtain
         * read-lock to call getTreeNode, but during failure to get
         * lock, searches along next links. Never blocks: while a writer
         * holds the lock, readers fall back to the linear list scan.
         */
        final V getValue(int h, Object k) {
            Node<V> r = null;
            int c = getState(); // Must read lock state first
            for (Node<V> e = first; e != null; e = e.next) {
                if (c <= 0 && compareAndSetState(c, c - 1)) {
                    // Acquired the shared (read) lock; do a tree search.
                    try {
                        r = getTreeNode(h, k, root);
                    } finally {
                        releaseShared(0);
                    }
                    break;
                }
                else if (e.hash == h && k.equals(e.key)) {
                    // Lock unavailable: matched during slow list traversal.
                    r = e;
                    break;
                }
                else
                    c = getState(); // re-read state; retry lock on next node
            }
            return r == null ? null : r.val;
        }

        /**
         * Finds or adds a node. Caller must hold the exclusive lock.
         * @return null if added
         */
        @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
            (int h, Object k, V v) {
            Class<?> c = k.getClass();
            TreeNode<V> pp = root, p = null;
            int dir = 0;
            while (pp != null) { // find existing node or leaf to insert at
                int ph; Object pk; Class<?> pc;
                p = pp;
                if ((ph = p.hash) == h) {
                    if ((pk = p.key) == k || k.equals(pk))
                        return p;
                    if (c != (pc = pk.getClass()) ||
                        !(k instanceof Comparable) ||
                        (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
                        TreeNode<V> s = null, r = null, pr;
                        if ((dir = (c == pc) ? 0 :
                             c.getName().compareTo(pc.getName())) == 0) {
                            if ((pr = p.right) != null && h >= pr.hash &&
                                (r = getTreeNode(h, k, pr)) != null)
                                return r;
                            else // continue left
                                dir = -1;
                        }
                        else if ((pr = p.right) != null && h >= pr.hash)
                            s = pr;
                        if (s != null && (r = getTreeNode(h, k, s)) != null)
                            return r;
                    }
                }
                else
                    dir = (h < ph) ? -1 : 1;
                pp = (dir > 0) ? p.right : p.left;
            }

            // Not found: link new node at head of the next-pointer list and
            // as a child of leaf p, then restore red-black invariants.
            TreeNode<V> f = first;
            TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
            if (p == null)
                root = x;
            else { // attach and rebalance; adapted from CLR
                TreeNode<V> xp, xpp;
                if (f != null)
                    f.prev = x;
                if (dir <= 0)
                    p.left = x;
                else
                    p.right = x;
                x.red = true;
                while (x != null && (xp = x.parent) != null && xp.red &&
                       (xpp = xp.parent) != null) {
                    TreeNode<V> xppl = xpp.left;
                    if (xp == xppl) {
                        TreeNode<V> y = xpp.right;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.right) {
                                rotateLeft(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateRight(xpp);
                                }
                            }
                        }
                    }
                    else {
                        TreeNode<V> y = xppl;
                        if (y != null && y.red) {
                            y.red = false;
                            xp.red = false;
                            xpp.red = true;
                            x = xpp;
                        }
                        else {
                            if (x == xp.left) {
                                rotateRight(x = xp);
                                xpp = (xp = x.parent) == null ? null : xp.parent;
                            }
                            if (xp != null) {
                                xp.red = false;
                                if (xpp != null) {
                                    xpp.red = true;
                                    rotateLeft(xpp);
                                }
                            }
                        }
                    }
                }
                TreeNode<V> r = root;
                if (r != null && r.red)
                    r.red = false; // root is always black
            }
            return null;
        }

        /**
         * Removes the given node, that must be present before this
         * call. This is messier than typical red-black deletion code
         * because we cannot swap the contents of an interior node
         * with a leaf successor that is pinned by "next" pointers
         * that are accessible independently of lock. So instead we
         * swap the tree linkages. Caller must hold the exclusive lock.
         */
        final void deleteTreeNode(TreeNode<V> p) {
            TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
            TreeNode<V> pred = p.prev;
            if (pred == null)
                first = next;
            else
                pred.next = next;
            if (next != null)
                next.prev = pred;
            TreeNode<V> replacement;
            TreeNode<V> pl = p.left;
            TreeNode<V> pr = p.right;
            if (pl != null && pr != null) {
                // Two children: exchange tree links (not contents) with the
                // in-order successor s, then delete p from its new position.
                TreeNode<V> s = pr, sl;
                while ((sl = s.left) != null) // find successor
                    s = sl;
                boolean c = s.red; s.red = p.red; p.red = c; // swap colors
                TreeNode<V> sr = s.right;
                TreeNode<V> pp = p.parent;
                if (s == pr) { // p was s's direct parent
                    p.parent = s;
                    s.right = p;
                }
                else {
                    TreeNode<V> sp = s.parent;
                    if ((p.parent = sp) != null) {
                        if (s == sp.left)
                            sp.left = p;
                        else
                            sp.right = p;
                    }
                    if ((s.right = pr) != null)
                        pr.parent = s;
                }
                p.left = null;
                if ((p.right = sr) != null)
                    sr.parent = p;
                if ((s.left = pl) != null)
                    pl.parent = s;
                if ((s.parent = pp) == null)
                    root = s;
                else if (p == pp.left)
                    pp.left = s;
                else
                    pp.right = s;
                replacement = sr;
            }
            else
                replacement = (pl != null) ? pl : pr;
            TreeNode<V> pp = p.parent;
            if (replacement == null) {
                if (pp == null) {
                    root = null; // p was the only node
                    return;
                }
                replacement = p; // rebalance around p itself, detach later
            }
            else {
                replacement.parent = pp;
                if (pp == null)
                    root = replacement;
                else if (p == pp.left)
                    pp.left = replacement;
                else
                    pp.right = replacement;
                p.left = p.right = p.parent = null;
            }
            if (!p.red) { // rebalance, from CLR
                TreeNode<V> x = replacement;
                while (x != null) {
                    TreeNode<V> xp, xpl;
                    if (x.red || (xp = x.parent) == null) {
                        x.red = false;
                        break;
                    }
                    if (x == (xpl = xp.left)) {
                        TreeNode<V> sib = xp.right;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateLeft(xp);
                            sib = (xp = x.parent) == null ? null : xp.right;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sr == null || !sr.red) &&
                                (sl == null || !sl.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sr == null || !sr.red) {
                                    if (sl != null)
                                        sl.red = false;
                                    sib.red = true;
                                    rotateRight(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.right;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sr = sib.right) != null)
                                        sr.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateLeft(xp);
                                }
                                x = root;
                            }
                        }
                    }
                    else { // symmetric
                        TreeNode<V> sib = xpl;
                        if (sib != null && sib.red) {
                            sib.red = false;
                            xp.red = true;
                            rotateRight(xp);
                            sib = (xp = x.parent) == null ? null : xp.left;
                        }
                        if (sib == null)
                            x = xp;
                        else {
                            TreeNode<V> sl = sib.left, sr = sib.right;
                            if ((sl == null || !sl.red) &&
                                (sr == null || !sr.red)) {
                                sib.red = true;
                                x = xp;
                            }
                            else {
                                if (sl == null || !sl.red) {
                                    if (sr != null)
                                        sr.red = false;
                                    sib.red = true;
                                    rotateLeft(sib);
                                    sib = (xp = x.parent) == null ?
                                        null : xp.left;
                                }
                                if (sib != null) {
                                    sib.red = (xp == null) ? false : xp.red;
                                    if ((sl = sib.left) != null)
                                        sl.red = false;
                                }
                                if (xp != null) {
                                    xp.red = false;
                                    rotateRight(xp);
                                }
                                x = root;
                            }
                        }
                    }
                }
            }
            if (p == replacement && (pp = p.parent) != null) {
                if (p == pp.left) // detach pointers
                    pp.left = null;
                else if (p == pp.right)
                    pp.right = null;
                p.parent = null;
            }
        }
    }
1089
1090 /* ---------------- Collision reduction methods -------------- */
1091
1092 /**
1093 * Spreads higher bits to lower, and also forces top bit to 0.
1094 * Because the table uses power-of-two masking, sets of hashes
1095 * that vary only in bits above the current mask will always
1096 * collide. (Among known examples are sets of Float keys holding
1097 * consecutive whole numbers in small tables.) To counter this,
1098 * we apply a transform that spreads the impact of higher bits
1099 * downward. There is a tradeoff between speed, utility, and
1100 * quality of bit-spreading. Because many common sets of hashes
1101 * are already reasonably distributed across bits (so don't benefit
1102 * from spreading), and because we use trees to handle large sets
1103 * of collisions in bins, we don't need excessively high quality.
1104 */
1105 private static final int spread(int h) {
1106 h ^= (h >>> 18) ^ (h >>> 12);
1107 return (h ^ (h >>> 10)) & HASH_BITS;
1108 }
1109
1110 /**
1111 * Replaces a list bin with a tree bin if key is comparable. Call
1112 * only when locked.
1113 */
1114 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1115 if (key instanceof Comparable) {
1116 TreeBin<V> t = new TreeBin<V>();
1117 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1118 t.putTreeNode(e.hash, e.key, e.val);
1119 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1120 }
1121 }
1122
1123 /* ---------------- Internal access and update methods -------------- */
1124
    /** Implementation for get and containsKey. Lock-free: retries from
     * the new table when it encounters a forwarding node. */
    @SuppressWarnings("unchecked") private final V internalGet(Object k) {
        int h = spread(k.hashCode());
        retry: for (Node<V>[] tab = table; tab != null;) {
            Node<V> e; Object ek; V ev; int eh; // locals to read fields once
            for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
                if ((eh = e.hash) < 0) {
                    // Negative hash marks a special node: a TreeBin holder,
                    // or a forwarding node whose key is the next table.
                    if ((ek = e.key) instanceof TreeBin) // search TreeBin
                        return ((TreeBin<V>)ek).getValue(h, k);
                    else { // restart with new table
                        tab = (Node<V>[])ek;
                        continue retry;
                    }
                }
                else if (eh == h && (ev = e.val) != null &&
                         ((ek = e.key) == k || k.equals(ek)))
                    return ev;
            }
            break;
        }
        return null;
    }
1147
    /**
     * Implementation for the four public remove/replace methods:
     * Replaces node value with v, conditional upon match of cv if
     * non-null. If resulting value is null, delete.
     *
     * Locks the bin (tree write-lock or bin-head monitor), revalidates
     * that the head is unchanged, then updates/unlinks in place.
     */
    @SuppressWarnings("unchecked") private final V internalReplace
        (Object k, V v, Object cv) {
        int h = spread(k.hashCode());
        V oldVal = null;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null ||
                (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
                break; // empty bin: nothing to replace
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean validated = false;
                    boolean deleted = false;
                    t.acquire(0); // exclusive (write) lock on the tree bin
                    try {
                        if (tabAt(tab, i) == f) {
                            validated = true;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            if (p != null) {
                                V pv = p.val;
                                if (cv == null || cv == pv || cv.equals(pv)) {
                                    oldVal = pv;
                                    if ((p.val = v) == null) {
                                        deleted = true;
                                        t.deleteTreeNode(p);
                                    }
                                }
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (validated) {
                        if (deleted)
                            addCount(-1L, -1);
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else if (fh != h && f.next == null) // precheck
                break;                          // rules out possible existence
            else {
                boolean validated = false;
                boolean deleted = false;
                synchronized (f) { // lock the bin head
                    if (tabAt(tab, i) == f) {
                        validated = true;
                        for (Node<V> e = f, pred = null;;) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                ((ev = e.val) != null) &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                if (cv == null || cv == ev || cv.equals(ev)) {
                                    oldVal = ev;
                                    if ((e.val = v) == null) {
                                        deleted = true;
                                        Node<V> en = e.next;
                                        if (pred != null)
                                            pred.next = en;
                                        else
                                            setTabAt(tab, i, en);
                                    }
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null)
                                break;
                        }
                    }
                }
                if (validated) {
                    if (deleted)
                        addCount(-1L, -1);
                    break;
                }
            }
        }
        return oldVal;
    }
1236
1237 /*
1238 * Internal versions of insertion methods
1239 * All have the same basic structure as the first (internalPut):
1240 * 1. If table uninitialized, create
1241 * 2. If bin empty, try to CAS new node
1242 * 3. If bin stale, use new table
 * 4. If bin converted to TreeBin, validate and relay to TreeBin methods
1244 * 5. Lock and validate; if valid, scan and add or update
1245 *
1246 * The putAll method differs mainly in attempting to pre-allocate
1247 * enough table space, and also more lazily performs count updates
1248 * and checks.
1249 *
1250 * Most of the function-accepting methods can't be factored nicely
1251 * because they require different functional forms, so instead
1252 * sprawl out similar mechanics.
1253 */
1254
    /** Implementation for put and putIfAbsent. Returns the previous
     * value if the key was present, else null after inserting. */
    @SuppressWarnings("unchecked") private final V internalPut
        (K k, V v, boolean onlyIfAbsent) {
        if (k == null || v == null) throw new NullPointerException();
        int h = spread(k.hashCode());
        int len = 0; // bin length estimate, passed to addCount resize check
        for (Node<V>[] tab = table;;) {
            int i, fh; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
                    break; // no lock when adding to empty bin
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    V oldVal = null;
                    t.acquire(0); // exclusive tree-bin lock
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 2;
                            TreeNode<V> p = t.putTreeNode(h, k, v);
                            if (p != null) {
                                oldVal = p.val;
                                if (!onlyIfAbsent)
                                    p.val = v;
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (oldVal != null)
                            return oldVal;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
                     ((fk = f.key) == k || k.equals(fk))) // peek while nearby
                return fv;
            else {
                V oldVal = null;
                synchronized (f) { // lock bin head, revalidate, then scan
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                oldVal = ev;
                                if (!onlyIfAbsent)
                                    e.val = v;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                last.next = new Node<V>(h, k, v, null);
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (oldVal != null)
                        return oldVal;
                    break;
                }
            }
        }
        addCount(1L, len);
        return null;
    }
1334
    /** Implementation for computeIfAbsent. The mapping function runs
     * while the bin is locked (or reserved via a placeholder node), so
     * it must not itself access this map. */
    @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
        (K k, Function<? super K, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                // Reserve the empty bin with a value-less node while the
                // function runs; remove it if the function yields null.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        len = 1;
                        try {
                            if ((val = mf.apply(k)) != null)
                                node.val = val;
                        } finally {
                            if (val == null)
                                setTabAt(tab, i, null);
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    boolean added = false;
                    t.acquire(0); // exclusive tree-bin lock
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            if (p != null)
                                val = p.val; // already present
                            else if ((val = mf.apply(k)) != null) {
                                added = true;
                                len = 2;
                                t.putTreeNode(h, k, val);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0) {
                        if (!added)
                            return val;
                        break;
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                for (Node<V> e = f; e != null; e = e.next) { // prescan
                    Object ek; V ev;
                    if (e.hash == h && (ev = e.val) != null &&
                        ((ek = e.key) == k || k.equals(ek)))
                        return ev; // present: avoid locking entirely
                }
                boolean added = false;
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = ev;
                                break;
                            }
                            Node<V> last = e;
                            if ((e = e.next) == null) {
                                if ((val = mf.apply(k)) != null) {
                                    added = true;
                                    last.next = new Node<V>(h, k, val, null);
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0) {
                    if (!added)
                        return val;
                    break;
                }
            }
        }
        if (val != null)
            addCount(1L, len);
        return val;
    }
1436
    /** Implementation for compute (and computeIfPresent when
     * onlyIfPresent). delta tracks the net size change (-1, 0 or +1);
     * the remapping function runs under the bin lock. */
    @SuppressWarnings("unchecked") private final V internalCompute
        (K k, boolean onlyIfPresent,
         BiFunction<? super K, ? super V, ? extends V> mf) {
        if (k == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0;
        int len = 0;
        for (Node<V>[] tab = table;;) {
            Node<V> f; int i, fh; Object fk;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                if (onlyIfPresent)
                    break; // computeIfPresent on empty bin: no-op
                // Reserve the bin with a placeholder while mf runs.
                Node<V> node = new Node<V>(h, k, null, null);
                synchronized (node) {
                    if (casTabAt(tab, i, null, node)) {
                        try {
                            len = 1;
                            if ((val = mf.apply(k, null)) != null) {
                                node.val = val;
                                delta = 1;
                            }
                        } finally {
                            if (delta == 0)
                                setTabAt(tab, i, null); // undo reservation
                        }
                    }
                }
                if (len != 0)
                    break;
            }
            else if ((fh = f.hash) < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive tree-bin lock
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            if (p == null && onlyIfPresent)
                                break;
                            V pv = (p == null) ? null : p.val;
                            if ((val = mf.apply(k, pv)) != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result: remove
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(k, ev);
                                if (val != null)
                                    e.val = val;
                                else { // null result: unlink node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) {
                                if (!onlyIfPresent &&
                                    (val = mf.apply(k, null)) != null) {
                                    pred.next = new Node<V>(h, k, val, null);
                                    delta = 1;
                                    if (len >= TREE_THRESHOLD)
                                        replaceWithTreeBin(tab, i, k);
                                }
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1550
    /** Implementation for merge. If the key is absent, inserts v;
     * otherwise replaces with mf(oldValue, v), removing the entry when
     * the function returns null. */
    @SuppressWarnings("unchecked") private final V internalMerge
        (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
        if (k == null || v == null || mf == null)
            throw new NullPointerException();
        int h = spread(k.hashCode());
        V val = null;
        int delta = 0; // net size change
        int len = 0;
        for (Node<V>[] tab = table;;) {
            int i; Node<V> f; Object fk; V fv;
            if (tab == null)
                tab = initTable();
            else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
                // Empty bin: mf is not consulted, v is installed directly.
                if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                    delta = 1;
                    val = v;
                    break;
                }
            }
            else if (f.hash < 0) {
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive tree-bin lock
                    try {
                        if (tabAt(tab, i) == f) {
                            len = 1;
                            TreeNode<V> p = t.getTreeNode(h, k, t.root);
                            val = (p == null) ? v : mf.apply(p.val, v);
                            if (val != null) {
                                if (p != null)
                                    p.val = val;
                                else {
                                    len = 2;
                                    delta = 1;
                                    t.putTreeNode(h, k, val);
                                }
                            }
                            else if (p != null) { // null result: remove
                                delta = -1;
                                t.deleteTreeNode(p);
                            }
                        }
                    } finally {
                        t.release(0);
                    }
                    if (len != 0)
                        break;
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) {
                        len = 1;
                        for (Node<V> e = f, pred = null;; ++len) {
                            Object ek; V ev;
                            if (e.hash == h &&
                                (ev = e.val) != null &&
                                ((ek = e.key) == k || k.equals(ek))) {
                                val = mf.apply(ev, v);
                                if (val != null)
                                    e.val = val;
                                else { // null result: unlink node
                                    delta = -1;
                                    Node<V> en = e.next;
                                    if (pred != null)
                                        pred.next = en;
                                    else
                                        setTabAt(tab, i, en);
                                }
                                break;
                            }
                            pred = e;
                            if ((e = e.next) == null) { // absent: append v
                                val = v;
                                pred.next = new Node<V>(h, k, val, null);
                                delta = 1;
                                if (len >= TREE_THRESHOLD)
                                    replaceWithTreeBin(tab, i, k);
                                break;
                            }
                        }
                    }
                }
                if (len != 0)
                    break;
            }
        }
        if (delta != 0)
            addCount((long)delta, len);
        return val;
    }
1645
    /** Implementation for putAll. Pre-sizes the table, then inserts each
     * entry, committing count updates lazily; a null entry/key/value is
     * reported only after all prior insertions are counted. */
    @SuppressWarnings("unchecked") private final void internalPutAll
        (Map<? extends K, ? extends V> m) {
        tryPresize(m.size());
        long delta = 0L; // number of uncommitted additions
        boolean npe = false; // to throw exception on exit for nulls
        try { // to clean up counts on other exceptions
            for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
                Object k; V v;
                if (entry == null || (k = entry.getKey()) == null ||
                    (v = entry.getValue()) == null) {
                    npe = true;
                    break; // defer NPE until counts are flushed in finally
                }
                int h = spread(k.hashCode());
                for (Node<V>[] tab = table;;) {
                    int i; Node<V> f; int fh; Object fk;
                    if (tab == null)
                        tab = initTable();
                    else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
                        if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
                            ++delta;
                            break;
                        }
                    }
                    else if ((fh = f.hash) < 0) {
                        if ((fk = f.key) instanceof TreeBin) {
                            TreeBin<V> t = (TreeBin<V>)fk;
                            boolean validated = false;
                            t.acquire(0); // exclusive tree-bin lock
                            try {
                                if (tabAt(tab, i) == f) {
                                    validated = true;
                                    TreeNode<V> p = t.getTreeNode(h, k, t.root);
                                    if (p != null)
                                        p.val = v;
                                    else {
                                        t.putTreeNode(h, k, v);
                                        ++delta;
                                    }
                                }
                            } finally {
                                t.release(0);
                            }
                            if (validated)
                                break;
                        }
                        else
                            tab = (Node<V>[])fk; // forwarding node
                    }
                    else {
                        int len = 0;
                        synchronized (f) { // lock bin head
                            if (tabAt(tab, i) == f) {
                                len = 1;
                                for (Node<V> e = f;; ++len) {
                                    Object ek; V ev;
                                    if (e.hash == h &&
                                        (ev = e.val) != null &&
                                        ((ek = e.key) == k || k.equals(ek))) {
                                        e.val = v;
                                        break;
                                    }
                                    Node<V> last = e;
                                    if ((e = e.next) == null) {
                                        ++delta;
                                        last.next = new Node<V>(h, k, v, null);
                                        if (len >= TREE_THRESHOLD)
                                            replaceWithTreeBin(tab, i, k);
                                        break;
                                    }
                                }
                            }
                        }
                        if (len != 0) {
                            if (len > 1) // long bin: flush count, may resize
                                addCount(delta, len);
                            break;
                        }
                    }
                }
            }
        } finally {
            if (delta != 0L)
                addCount(delta, 2);
        }
        if (npe)
            throw new NullPointerException();
    }
1735
    /**
     * Implementation for clear. Steps through each bin, removing all
     * nodes. Locks one bin at a time, so the map is never globally
     * locked and concurrent readers may still see entries not yet
     * reached.
     */
    @SuppressWarnings("unchecked") private final void internalClear() {
        long delta = 0L; // negative number of deletions
        int i = 0;
        Node<V>[] tab = table;
        while (tab != null && i < tab.length) {
            Node<V> f = tabAt(tab, i);
            if (f == null)
                ++i; // empty bin
            else if (f.hash < 0) {
                Object fk;
                if ((fk = f.key) instanceof TreeBin) {
                    TreeBin<V> t = (TreeBin<V>)fk;
                    t.acquire(0); // exclusive tree-bin lock
                    try {
                        if (tabAt(tab, i) == f) {
                            // Null out values so concurrent list readers
                            // see removals, then drop the whole tree.
                            for (Node<V> p = t.first; p != null; p = p.next) {
                                if (p.val != null) { // (currently always true)
                                    p.val = null;
                                    --delta;
                                }
                            }
                            t.first = null;
                            t.root = null;
                            ++i;
                        }
                    } finally {
                        t.release(0);
                    }
                }
                else
                    tab = (Node<V>[])fk; // forwarding node: use new table
            }
            else {
                synchronized (f) { // lock bin head
                    if (tabAt(tab, i) == f) {
                        for (Node<V> e = f; e != null; e = e.next) {
                            if (e.val != null) { // (currently always true)
                                e.val = null;
                                --delta;
                            }
                        }
                        setTabAt(tab, i, null);
                        ++i;
                    }
                }
            }
        }
        if (delta != 0L)
            addCount(delta, -1);
    }
1790
1791 /* ---------------- Table Initialization and Resizing -------------- */
1792
1793 /**
1794 * Returns a power of two table size for the given desired capacity.
1795 * See Hackers Delight, sec 3.2
1796 */
1797 private static final int tableSizeFor(int c) {
1798 int n = c - 1;
1799 n |= n >>> 1;
1800 n |= n >>> 2;
1801 n |= n >>> 4;
1802 n |= n >>> 8;
1803 n |= n >>> 16;
1804 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1805 }
1806
    /**
     * Initializes table, using the size recorded in sizeCtl.
     * Exactly one thread wins the CAS of sizeCtl to -1 and allocates;
     * losers spin-yield until the winner publishes the table.
     */
    @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
        Node<V>[] tab; int sc;
        while ((tab = table) == null) {
            if ((sc = sizeCtl) < 0)
                Thread.yield(); // lost initialization race; just spin
            else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                try {
                    if ((tab = table) == null) { // recheck under "lock"
                        int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
                        @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
                        table = tab = (Node<V>[])tb;
                        sc = n - (n >>> 2); // next resize threshold: 0.75 * n
                    }
                } finally {
                    sizeCtl = sc; // publish threshold (or restore on failure)
                }
                break;
            }
        }
        return tab;
    }
1831
    /**
     * Adds to count, and if table is too small and not already
     * resizing, initiates transfer. If already resizing, helps
     * perform transfer if work is available. Rechecks occupancy
     * after a transfer to see if another resize is already needed
     * because resizings are lagging additions.
     *
     * @param x the count to add
     * @param check if <0, don't check resize, if <= 1 only check if uncontended
     */
    private final void addCount(long x, int check) {
        Cell[] as; long b, s;
        // Fast path: CAS the shared base count; on contention (or once
        // cells exist) fall over to the per-thread striped cells.
        if ((as = counterCells) != null ||
            !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
            CellHashCode hc; Cell a; long v; int m;
            boolean uncontended = true;
            if ((hc = threadCellHashCode.get()) == null ||
                as == null || (m = as.length - 1) < 0 ||
                (a = as[m & hc.code]) == null ||
                !(uncontended =
                  U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
                fullAddCount(x, hc, uncontended); // slow path: grow/retry cells
                return;
            }
            if (check <= 1)
                return;
            s = sumCount();
        }
        if (check >= 0) {
            Node<V>[] tab, nt; int sc;
            // While occupancy exceeds threshold (sizeCtl), start or help
            // a transfer; negative sizeCtl means a resize is in progress.
            while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
                   tab.length < MAXIMUM_CAPACITY) {
                if (sc < 0) {
                    if (sc == -1 || transferIndex <= transferOrigin ||
                        (nt = nextTable) == null)
                        break; // resize finishing or no work left to claim
                    if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
                        transfer(tab, nt); // join as an extra worker
                }
                else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
                    transfer(tab, null); // initiate a new resize
                s = sumCount();
            }
        }
    }
1877
    /**
     * Tries to presize table to accommodate the given number of elements.
     * Sizes for a 0.75 load factor, then either creates the initial
     * table or triggers transfers until sizeCtl covers the target.
     *
     * @param size number of elements (doesn't need to be perfectly accurate)
     */
    @SuppressWarnings("unchecked") private final void tryPresize(int size) {
        int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
            tableSizeFor(size + (size >>> 1) + 1);
        int sc;
        while ((sc = sizeCtl) >= 0) { // stop if a resize is in progress
            Node<V>[] tab = table; int n;
            if (tab == null || (n = tab.length) == 0) {
                n = (sc > c) ? sc : c;
                if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                    try {
                        if (table == tab) { // recheck under "lock"
                            @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
                            table = (Node<V>[])tb;
                            sc = n - (n >>> 2); // 0.75 * n threshold
                        }
                    } finally {
                        sizeCtl = sc;
                    }
                }
            }
            else if (c <= sc || n >= MAXIMUM_CAPACITY)
                break; // already large enough (or cannot grow further)
            else if (tab == table &&
                     U.compareAndSwapInt(this, SIZECTL, sc, -2))
                transfer(tab, null); // double the table
        }
    }
1910
    /*
     * Moves and/or copies the nodes in each bin to new table. See
     * above for explanation.
     *
     * If nextTab is null, this call initiates a resize (allocating the
     * doubled table); otherwise the caller is helping an in-progress
     * resize. Work is claimed in "stride"-sized chunks of bin indices
     * via CAS on transferIndex, so multiple threads can transfer
     * disjoint regions concurrently.
     */
    @SuppressWarnings("unchecked") private final void transfer
        (Node<V>[] tab, Node<V>[] nextTab) {
        int n = tab.length, stride;
        if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
            stride = MIN_TRANSFER_STRIDE; // subdivide range
        if (nextTab == null) {            // initiating
            try {
                @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
                nextTab = (Node<V>[])tb;
            } catch (Throwable ex) {      // try to cope with OOME
                // Disable further resizing attempts rather than propagate.
                sizeCtl = Integer.MAX_VALUE;
                return;
            }
            nextTable = nextTab;
            transferOrigin = n;
            transferIndex = n;
            // "rev" is a reverse-forwarding node placed in not-yet-ready
            // slots of the new table, pointing back at the old table.
            Node<V> rev = new Node<V>(MOVED, tab, null, null);
            for (int k = n; k > 0;) {    // progressively reveal ready slots
                int nextk = (k > stride) ? k - stride : 0;
                for (int m = nextk; m < k; ++m)
                    nextTab[m] = rev;
                for (int m = n + nextk; m < n + k; ++m)
                    nextTab[m] = rev;
                // Ordered write so readers see the filled slots before
                // the lowered origin.
                U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
            }
        }
        int nextn = nextTab.length;
        // "fwd" marks old-table bins whose contents have moved to nextTab.
        Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
        boolean advance = true;
        for (int i = 0, bound = 0;;) {
            int nextIndex, nextBound; Node<V> f; Object fk;
            // Claim the next bin index to process, either within the
            // current chunk or by CASing transferIndex down by stride.
            while (advance) {
                if (--i >= bound)
                    advance = false;
                else if ((nextIndex = transferIndex) <= transferOrigin) {
                    i = -1;              // no more chunks to claim
                    advance = false;
                }
                else if (U.compareAndSwapInt
                         (this, TRANSFERINDEX, nextIndex,
                          nextBound = (nextIndex > stride ?
                                       nextIndex - stride : 0))) {
                    bound = nextBound;
                    i = nextIndex - 1;
                    advance = false;
                }
            }
            if (i < 0 || i >= n || i + n >= nextn) {
                // Finished this thread's share: decrement the (negative)
                // resizer count; the last one out (sc == -1) commits the
                // new table and sets the next threshold (1.5 * n = 0.75 * 2n).
                for (int sc;;) {
                    if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
                        if (sc == -1) {
                            nextTable = null;
                            table = nextTab;
                            sizeCtl = (n << 1) - (n >>> 1);
                        }
                        return;
                    }
                }
            }
            else if ((f = tabAt(tab, i)) == null) {
                // Empty bin: install forwarder and clear both target slots.
                if (casTabAt(tab, i, null, fwd)) {
                    setTabAt(nextTab, i, null);
                    setTabAt(nextTab, i + n, null);
                    advance = true;
                }
            }
            else if (f.hash >= 0) {
                // Plain list bin: lock the head and split into "lo" (bit
                // n clear, stays at index i) and "hi" (bit n set, moves
                // to i + n), reusing the trailing run with a common bit.
                synchronized (f) {
                    if (tabAt(tab, i) == f) {
                        int runBit = f.hash & n;
                        Node<V> lastRun = f, lo = null, hi = null;
                        for (Node<V> p = f.next; p != null; p = p.next) {
                            int b = p.hash & n;
                            if (b != runBit) {
                                runBit = b;
                                lastRun = p;
                            }
                        }
                        if (runBit == 0)
                            lo = lastRun;
                        else
                            hi = lastRun;
                        for (Node<V> p = f; p != lastRun; p = p.next) {
                            int ph = p.hash;
                            Object pk = p.key; V pv = p.val;
                            if ((ph & n) == 0)
                                lo = new Node<V>(ph, pk, pv, lo);
                            else
                                hi = new Node<V>(ph, pk, pv, hi);
                        }
                        setTabAt(nextTab, i, lo);
                        setTabAt(nextTab, i + n, hi);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                }
            }
            else if ((fk = f.key) instanceof TreeBin) {
                // Tree bin: lock it and split members into two new trees.
                TreeBin<V> t = (TreeBin<V>)fk;
                t.acquire(0);
                try {
                    if (tabAt(tab, i) == f) {
                        TreeBin<V> lt = new TreeBin<V>();
                        TreeBin<V> ht = new TreeBin<V>();
                        int lc = 0, hc = 0;
                        for (Node<V> e = t.first; e != null; e = e.next) {
                            int h = e.hash;
                            Object k = e.key; V v = e.val;
                            if ((h & n) == 0) {
                                ++lc;
                                lt.putTreeNode(h, k, v);
                            }
                            else {
                                ++hc;
                                ht.putTreeNode(h, k, v);
                            }
                        }
                        Node<V> ln, hn; // throw away trees if too small
                        if (lc < TREE_THRESHOLD) {
                            ln = null;
                            for (Node<V> p = lt.first; p != null; p = p.next)
                                ln = new Node<V>(p.hash, p.key, p.val, ln);
                        }
                        else
                            ln = new Node<V>(MOVED, lt, null, null);
                        setTabAt(nextTab, i, ln);
                        if (hc < TREE_THRESHOLD) {
                            hn = null;
                            for (Node<V> p = ht.first; p != null; p = p.next)
                                hn = new Node<V>(p.hash, p.key, p.val, hn);
                        }
                        else
                            hn = new Node<V>(MOVED, ht, null, null);
                        setTabAt(nextTab, i + n, hn);
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                } finally {
                    t.release(0);
                }
            }
            else
                advance = true; // already processed
        }
    }
2060
2061 /* ---------------- Counter support -------------- */
2062
2063 final long sumCount() {
2064 Cell[] as = counterCells; Cell a;
2065 long sum = baseCount;
2066 if (as != null) {
2067 for (int i = 0; i < as.length; ++i) {
2068 if ((a = as[i]) != null)
2069 sum += a.value;
2070 }
2071 }
2072 return sum;
2073 }
2074
    // See LongAdder version for explanation.
    // Slow path for addCount: retries the count update under contention,
    // lazily creating, expanding, or falling back from the counter-cell
    // table. cellsBusy (0/1, updated by CAS) acts as a spinlock guarding
    // structural changes to counterCells.
    private final void fullAddCount(long x, CellHashCode hc,
                                    boolean wasUncontended) {
        int h;
        if (hc == null) {
            // First contended update by this thread: create and register
            // its per-thread hash code used to pick a counter cell.
            hc = new CellHashCode();
            int s = cellHashCodeGenerator.addAndGet(SEED_INCREMENT);
            h = hc.code = (s == 0) ? 1 : s; // Avoid zero
            threadCellHashCode.set(hc);
        }
        else
            h = hc.code;
        boolean collide = false;                // True if last slot nonempty
        for (;;) {
            Cell[] as; Cell a; int n; long v;
            if ((as = counterCells) != null && (n = as.length) > 0) {
                if ((a = as[(n - 1) & h]) == null) {
                    if (cellsBusy == 0) {            // Try to attach new Cell
                        Cell r = new Cell(x);        // Optimistic create
                        if (cellsBusy == 0 &&
                            U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                            boolean created = false;
                            try {               // Recheck under lock
                                Cell[] rs; int m, j;
                                if ((rs = counterCells) != null &&
                                    (m = rs.length) > 0 &&
                                    rs[j = (m - 1) & h] == null) {
                                    rs[j] = r;
                                    created = true;
                                }
                            } finally {
                                cellsBusy = 0;
                            }
                            if (created)
                                break;
                            continue;           // Slot is now non-empty
                        }
                    }
                    collide = false;
                }
                else if (!wasUncontended)       // CAS already known to fail
                    wasUncontended = true;      // Continue after rehash
                else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
                    break;                      // Added to existing cell
                else if (counterCells != as || n >= NCPU)
                    collide = false;            // At max size or stale
                else if (!collide)
                    collide = true;             // Allow one more CAS miss first
                else if (cellsBusy == 0 &&
                         U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                    // Repeated collisions: double the cell table.
                    try {
                        if (counterCells == as) {// Expand table unless stale
                            Cell[] rs = new Cell[n << 1];
                            for (int i = 0; i < n; ++i)
                                rs[i] = as[i];
                            counterCells = rs;
                        }
                    } finally {
                        cellsBusy = 0;
                    }
                    collide = false;
                    continue;                   // Retry with expanded table
                }
                // xorshift rehash so this thread probes a different cell.
                h ^= h << 13;                   // Rehash
                h ^= h >>> 17;
                h ^= h << 5;
            }
            else if (cellsBusy == 0 && counterCells == as &&
                     U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
                boolean init = false;
                try {                           // Initialize table
                    if (counterCells == as) {
                        Cell[] rs = new Cell[2];
                        rs[h & 1] = new Cell(x);
                        counterCells = rs;
                        init = true;
                    }
                } finally {
                    cellsBusy = 0;
                }
                if (init)
                    break;
            }
            else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
                break;                          // Fall back on using base
        }
        hc.code = h;                            // Record index for next time
    }
2163
2164 /* ----------------Table Traversal -------------- */
2165
2166 /**
2167 * Encapsulates traversal for methods such as containsValue; also
2168 * serves as a base class for other iterators and bulk tasks.
2169 *
2170 * At each step, the iterator snapshots the key ("nextKey") and
2171 * value ("nextVal") of a valid node (i.e., one that, at point of
2172 * snapshot, has a non-null user value). Because val fields can
2173 * change (including to null, indicating deletion), field nextVal
2174 * might not be accurate at point of use, but still maintains the
2175 * weak consistency property of holding a value that was once
2176 * valid. To support iterator.remove, the nextKey field is not
2177 * updated (nulled out) when the iterator cannot advance.
2178 *
2179 * Internal traversals directly access these fields, as in:
2180 * {@code while (it.advance() != null) { process(it.nextKey); }}
2181 *
2182 * Exported iterators must track whether the iterator has advanced
2183 * (in hasNext vs next) (by setting/checking/nulling field
2184 * nextVal), and then extract key, value, or key-value pairs as
2185 * return values of next().
2186 *
2187 * The iterator visits once each still-valid node that was
2188 * reachable upon iterator construction. It might miss some that
2189 * were added to a bin after the bin was visited, which is OK wrt
2190 * consistency guarantees. Maintaining this property in the face
2191 * of possible ongoing resizes requires a fair amount of
2192 * bookkeeping state that is difficult to optimize away amidst
2193 * volatile accesses. Even so, traversal maintains reasonable
2194 * throughput.
2195 *
2196 * Normally, iteration proceeds bin-by-bin traversing lists.
2197 * However, if the table has been resized, then all future steps
2198 * must traverse both the bin at the current index as well as at
2199 * (index + baseSize); and so on for further resizings. To
2200 * paranoically cope with potential sharing by users of iterators
     * across threads, iteration terminates if a bounds check fails
2202 * for a table read.
2203 *
2204 * This class supports both Spliterator-based traversal and
2205 * CountedCompleter-based bulk tasks. The same "batch" field is
2206 * used, but in slightly different ways, in the two cases. For
2207 * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2208 * estimate of element coverage. For CHM tasks, it is a pre-scaled
2209 * size that halves down to zero for leaf tasks, that is only
2210 * computed upon execution of the task. (Tasks can be submitted to
2211 * any pool, of any size, so we don't know scale factors until
2212 * running.)
2213 *
2214 * This class extends CountedCompleter to streamline parallel
2215 * iteration in bulk operations. This adds only a few fields of
2216 * space overhead, which is small enough in cases where it is not
2217 * needed to not worry about it. Because CountedCompleter is
2218 * Serializable, but iterators need not be, we need to add warning
2219 * suppressions.
2220 */
    @SuppressWarnings("serial") static class Traverser<K,V,R>
        extends CountedCompleter<R> {
        final ConcurrentHashMap<K, V> map;
        Node<V> next;        // the next entry to use
        Object nextKey;      // cached key field of next
        V nextVal;           // cached val field of next
        Node<V>[] tab;       // current table; updated if resized
        int index;           // index of bin to use next
        int baseIndex;       // current index of initial table
        int baseLimit;       // index bound for initial table
        int baseSize;        // initial table size
        int batch;           // split control

        /** Creates iterator for all entries in the table. */
        Traverser(ConcurrentHashMap<K, V> map) {
            this.map = map;
            Node<V>[] t;
            if ((t = tab = map.table) != null)
                baseLimit = baseSize = t.length;
        }

        /** Task constructor: used by CHM bulk tasks (see class javadoc). */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
            super(it);
            this.map = map;
            this.batch = batch; // -1 if unknown
            if (it == null) {
                Node<V>[] t;
                if ((t = tab = map.table) != null)
                    baseLimit = baseSize = t.length;
            }
            else { // split parent: take the upper half of its index range
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex + 1) >>> 1;
            }
        }

        /** Spliterator constructor: also halves the batch estimate on split. */
        Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
            super(it);
            this.map = map;
            if (it == null) {
                Node<V>[] t;
                if ((t = tab = map.table) != null)
                    baseLimit = baseSize = t.length;
                // Saturating size estimate used by estimateSize().
                long n = map.sumCount();
                batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
                         (int)n);
            }
            else {
                this.tab = it.tab;
                this.baseSize = it.baseSize;
                int hi = this.baseLimit = it.baseLimit;
                it.baseLimit = this.index = this.baseIndex =
                    (hi + it.baseIndex + 1) >>> 1;
                this.batch = it.batch >>>= 1;
            }
        }

        /**
         * Advances next; returns nextVal or null if terminated.
         * See above for explanation.
         */
        @SuppressWarnings("unchecked") final V advance() {
            Node<V> e = next;
            V ev = null;
            outer: do {
                if (e != null)                  // advance past used/skipped node
                    e = e.next;
                while (e == null) {             // get to next non-null bin
                    ConcurrentHashMap<K, V> m;
                    Node<V>[] t; int b, i, n; Object ek; // must use locals
                    if ((t = tab) != null)
                        n = t.length;
                    else if ((m = map) != null && (t = tab = m.table) != null)
                        n = baseLimit = baseSize = t.length;
                    else
                        break outer;            // no table: terminate
                    if ((b = baseIndex) >= baseLimit ||
                        (i = index) < 0 || i >= n)
                        break outer;            // bounds exhausted: terminate
                    if ((e = tabAt(t, i)) != null && e.hash < 0) {
                        // Special node: either a TreeBin (traverse its list)
                        // or a forwarding node (follow to the new table).
                        if ((ek = e.key) instanceof TreeBin)
                            e = ((TreeBin<V>)ek).first;
                        else {
                            tab = (Node<V>[])ek;
                            continue;           // restarts due to null val
                        }
                    }                           // visit upper slots if present
                    index = (i += baseSize) < n ? i : (baseIndex = b + 1);
                }
                nextKey = e.key;
            } while ((ev = e.val) == null);     // skip deleted or special nodes
            next = e;
            return nextVal = ev;
        }

        /** Removes the last returned (or next advanced-to) entry from the map. */
        public final void remove() {
            Object k = nextKey;
            if (k == null && (advance() == null || (k = nextKey) == null))
                throw new IllegalStateException();
            map.internalReplace(k, null, null);
        }

        public final boolean hasNext() {
            return nextVal != null || advance() != null;
        }

        public final boolean hasMoreElements() { return hasNext(); }

        public void compute() { } // default no-op CountedCompleter body

        /**
         * Returns a batch value > 0 if this task should (and must) be
         * split, if so, adding to pending count, and in any case
         * updating batch value. The initial batch value is approx
         * exp2 of the number of times (minus one) to split task by
         * two before executing leaf action. This value is faster to
         * compute and more convenient to use as a guide to splitting
         * than is the depth, since it is used while dividing by two
         * anyway.
         */
        final int preSplit() {
            int b; ForkJoinPool pool;
            if ((b = batch) < 0) { // force initialization
                int sp = (((pool = getPool()) == null) ?
                          ForkJoinPool.getCommonPoolParallelism() :
                          pool.getParallelism()) << 3; // slack of 8
                long n = map.sumCount();
                b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
            }
            b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
            if ((batch = b) > 0)
                addToPendingCount(1);
            return b;
        }

        // spliterator support

        public long exactSizeIfKnown() {
            return -1;  // size is never exact under concurrent updates
        }

        public boolean hasExactSplits() {
            return false;
        }

        public long estimateSize() {
            return batch;
        }
    }
2374
2375 /* ---------------- Public operations -------------- */
2376
    /**
     * Creates a new, empty map with the default initial table size (16).
     */
    public ConcurrentHashMap() {
        // No work needed: the table is allocated lazily on first use
        // (see tryPresize / internal insertion paths).
    }
2382
2383 /**
2384 * Creates a new, empty map with an initial table size
2385 * accommodating the specified number of elements without the need
2386 * to dynamically resize.
2387 *
2388 * @param initialCapacity The implementation performs internal
2389 * sizing to accommodate this many elements.
2390 * @throws IllegalArgumentException if the initial capacity of
2391 * elements is negative
2392 */
2393 public ConcurrentHashMap(int initialCapacity) {
2394 if (initialCapacity < 0)
2395 throw new IllegalArgumentException();
2396 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2397 MAXIMUM_CAPACITY :
2398 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2399 this.sizeCtl = cap;
2400 }
2401
    /**
     * Creates a new map with the same mappings as the given map.
     *
     * @param m the map
     */
    public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
        this.sizeCtl = DEFAULT_CAPACITY;  // sizing hint before bulk insert
        internalPutAll(m);
    }
2411
    /**
     * Creates a new, empty map with an initial table size based on
     * the given number of elements ({@code initialCapacity}) and
     * initial table density ({@code loadFactor}).
     *
     * @param initialCapacity the initial capacity. The implementation
     * performs internal sizing to accommodate this many elements,
     * given the specified load factor.
     * @param loadFactor the load factor (table density) for
     * establishing the initial table size
     * @throws IllegalArgumentException if the initial capacity of
     * elements is negative or the load factor is nonpositive
     *
     * @since 1.6
     */
    public ConcurrentHashMap(int initialCapacity, float loadFactor) {
        this(initialCapacity, loadFactor, 1);  // concurrencyLevel of 1
    }
2430
2431 /**
2432 * Creates a new, empty map with an initial table size based on
2433 * the given number of elements ({@code initialCapacity}), table
2434 * density ({@code loadFactor}), and number of concurrently
2435 * updating threads ({@code concurrencyLevel}).
2436 *
2437 * @param initialCapacity the initial capacity. The implementation
2438 * performs internal sizing to accommodate this many elements,
2439 * given the specified load factor.
2440 * @param loadFactor the load factor (table density) for
2441 * establishing the initial table size
2442 * @param concurrencyLevel the estimated number of concurrently
2443 * updating threads. The implementation may use this value as
2444 * a sizing hint.
2445 * @throws IllegalArgumentException if the initial capacity is
2446 * negative or the load factor or concurrencyLevel are
2447 * nonpositive
2448 */
2449 public ConcurrentHashMap(int initialCapacity,
2450 float loadFactor, int concurrencyLevel) {
2451 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2452 throw new IllegalArgumentException();
2453 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2454 initialCapacity = concurrencyLevel; // as estimated threads
2455 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2456 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2457 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2458 this.sizeCtl = cap;
2459 }
2460
2461 /**
2462 * Creates a new {@link Set} backed by a ConcurrentHashMap
2463 * from the given type to {@code Boolean.TRUE}.
2464 *
2465 * @return the new set
2466 */
2467 public static <K> KeySetView<K,Boolean> newKeySet() {
2468 return new KeySetView<K,Boolean>(new ConcurrentHashMap<K,Boolean>(),
2469 Boolean.TRUE);
2470 }
2471
2472 /**
2473 * Creates a new {@link Set} backed by a ConcurrentHashMap
2474 * from the given type to {@code Boolean.TRUE}.
2475 *
2476 * @param initialCapacity The implementation performs internal
2477 * sizing to accommodate this many elements.
2478 * @throws IllegalArgumentException if the initial capacity of
2479 * elements is negative
2480 * @return the new set
2481 */
2482 public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2483 return new KeySetView<K,Boolean>
2484 (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2485 }
2486
2487 /**
2488 * {@inheritDoc}
2489 */
2490 public boolean isEmpty() {
2491 return sumCount() <= 0L; // ignore transient negative values
2492 }
2493
2494 /**
2495 * {@inheritDoc}
2496 */
2497 public int size() {
2498 long n = sumCount();
2499 return ((n < 0L) ? 0 :
2500 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2501 (int)n);
2502 }
2503
2504 /**
2505 * Returns the number of mappings. This method should be used
2506 * instead of {@link #size} because a ConcurrentHashMap may
2507 * contain more mappings than can be represented as an int. The
2508 * value returned is an estimate; the actual count may differ if
2509 * there are concurrent insertions or removals.
2510 *
2511 * @return the number of mappings
2512 */
2513 public long mappingCount() {
2514 long n = sumCount();
2515 return (n < 0L) ? 0L : n; // ignore transient negative values
2516 }
2517
    /**
     * Returns the value to which the specified key is mapped,
     * or {@code null} if this map contains no mapping for the key.
     *
     * <p>More formally, if this map contains a mapping from a key
     * {@code k} to a value {@code v} such that {@code key.equals(k)},
     * then this method returns {@code v}; otherwise it returns
     * {@code null}. (There can be at most one such mapping.)
     *
     * @throws NullPointerException if the specified key is null
     */
    public V get(Object key) {
        return internalGet(key);  // lock-free lookup
    }
2532
2533 /**
2534 * Returns the value to which the specified key is mapped,
2535 * or the given defaultValue if this map contains no mapping for the key.
2536 *
2537 * @param key the key
2538 * @param defaultValue the value to return if this map contains
2539 * no mapping for the given key
2540 * @return the mapping for the key, if present; else the defaultValue
2541 * @throws NullPointerException if the specified key is null
2542 */
2543 public V getValueOrDefault(Object key, V defaultValue) {
2544 V v;
2545 return (v = internalGet(key)) == null ? defaultValue : v;
2546 }
2547
    /**
     * Tests if the specified object is a key in this table.
     *
     * @param key possible key
     * @return {@code true} if and only if the specified object
     * is a key in this table, as determined by the
     * {@code equals} method; {@code false} otherwise
     * @throws NullPointerException if the specified key is null
     */
    public boolean containsKey(Object key) {
        return internalGet(key) != null;  // values are never null, so this suffices
    }
2560
2561 /**
2562 * Returns {@code true} if this map maps one or more keys to the
2563 * specified value. Note: This method may require a full traversal
2564 * of the map, and is much slower than method {@code containsKey}.
2565 *
2566 * @param value value whose presence in this map is to be tested
2567 * @return {@code true} if this map maps one or more keys to the
2568 * specified value
2569 * @throws NullPointerException if the specified value is null
2570 */
2571 public boolean containsValue(Object value) {
2572 if (value == null)
2573 throw new NullPointerException();
2574 V v;
2575 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2576 while ((v = it.advance()) != null) {
2577 if (v == value || value.equals(v))
2578 return true;
2579 }
2580 return false;
2581 }
2582
    /**
     * Legacy method testing if some key maps into the specified value
     * in this table. This method is identical in functionality to
     * {@link #containsValue}, and exists solely to ensure
     * full compatibility with class {@link java.util.Hashtable},
     * which supported this method prior to introduction of the
     * Java Collections framework.
     *
     * @param value a value to search for
     * @return {@code true} if and only if some key maps to the
     * {@code value} argument in this table as
     * determined by the {@code equals} method;
     * {@code false} otherwise
     * @throws NullPointerException if the specified value is null
     */
    @Deprecated public boolean contains(Object value) {
        return containsValue(value);  // pure alias for Hashtable compatibility
    }
2601
    /**
     * Maps the specified key to the specified value in this table.
     * Neither the key nor the value can be null.
     *
     * <p>The value can be retrieved by calling the {@code get} method
     * with a key that is equal to the original key.
     *
     * @param key key with which the specified value is to be associated
     * @param value value to be associated with the specified key
     * @return the previous value associated with {@code key}, or
     * {@code null} if there was no mapping for {@code key}
     * @throws NullPointerException if the specified key or value is null
     */
    public V put(K key, V value) {
        return internalPut(key, value, false);  // false: overwrite existing value
    }
2618
    /**
     * {@inheritDoc}
     *
     * @return the previous value associated with the specified key,
     * or {@code null} if there was no mapping for the key
     * @throws NullPointerException if the specified key or value is null
     */
    public V putIfAbsent(K key, V value) {
        return internalPut(key, value, true);  // true: only-if-absent
    }
2629
    /**
     * Copies all of the mappings from the specified map to this one.
     * These mappings replace any mappings that this map had for any of the
     * keys currently in the specified map.
     *
     * @param m mappings to be stored in this map
     */
    public void putAll(Map<? extends K, ? extends V> m) {
        internalPutAll(m);
    }
2640
    /**
     * If the specified key is not already associated with a value (or
     * is mapped to {@code null}), attempts to compute its value using
     * the given mapping function and enters it into this map unless
     * {@code null}. The entire method invocation is performed
     * atomically, so the function is applied at most once per key.
     * Some attempted update operations on this map by other threads
     * may be blocked while computation is in progress, so the
     * computation should be short and simple, and must not attempt to
     * update any other mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param mappingFunction the function to compute a value
     * @return the current (existing or computed) value associated with
     *         the specified key, or null if the computed value is null
     * @throws NullPointerException if the specified key or mappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the mappingFunction does so,
     *         in which case the mapping is left unestablished
     */
    public V computeIfAbsent
        (K key, Function<? super K, ? extends V> mappingFunction) {
        return internalComputeIfAbsent(key, mappingFunction);
    }
2668
    /**
     * If the value for the specified key is present and non-null,
     * attempts to compute a new mapping given the key and its current
     * mapped value. The entire method invocation is performed
     * atomically. Some attempted update operations on this map by
     * other threads may be blocked while computation is in progress,
     * so the computation should be short and simple, and must not
     * attempt to update any other mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param remappingFunction the function to compute a value
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or remappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V computeIfPresent
        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, true, remappingFunction);  // true: only-if-present
    }
2693
    /**
     * Attempts to compute a mapping for the specified key and its
     * current mapped value (or {@code null} if there is no current
     * mapping). The entire method invocation is performed atomically.
     * Some attempted update operations on this map by other threads
     * may be blocked while computation is in progress, so the
     * computation should be short and simple, and must not attempt to
     * update any other mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param remappingFunction the function to compute a value
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or remappingFunction
     *         is null
     * @throws IllegalStateException if the computation detectably
     *         attempts a recursive update to this map that would
     *         otherwise never complete
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V compute
        (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
        return internalCompute(key, false, remappingFunction);  // false: compute even if absent
    }
2718
    /**
     * If the specified key is not already associated with a
     * (non-null) value, associates it with the given value.
     * Otherwise, replaces the value with the results of the given
     * remapping function, or removes if {@code null}. The entire
     * method invocation is performed atomically. Some attempted
     * update operations on this map by other threads may be blocked
     * while computation is in progress, so the computation should be
     * short and simple, and must not attempt to update any other
     * mappings of this Map.
     *
     * @param key key with which the specified value is to be associated
     * @param value the value to use if absent
     * @param remappingFunction the function to recompute a value if present
     * @return the new value associated with the specified key, or null if none
     * @throws NullPointerException if the specified key or the
     *         remappingFunction is null
     * @throws RuntimeException or Error if the remappingFunction does so,
     *         in which case the mapping is unchanged
     */
    public V merge
        (K key, V value,
         BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
        return internalMerge(key, value, remappingFunction);
    }
2744
    /**
     * Removes the key (and its corresponding value) from this map.
     * This method does nothing if the key is not in the map.
     *
     * @param key the key that needs to be removed
     * @return the previous value associated with {@code key}, or
     * {@code null} if there was no mapping for {@code key}
     * @throws NullPointerException if the specified key is null
     */
    public V remove(Object key) {
        return internalReplace(key, null, null);  // null new value means delete
    }
2757
2758 /**
2759 * {@inheritDoc}
2760 *
2761 * @throws NullPointerException if the specified key is null
2762 */
2763 public boolean remove(Object key, Object value) {
2764 return value != null && internalReplace(key, null, value) != null;
2765 }
2766
2767 /**
2768 * {@inheritDoc}
2769 *
2770 * @throws NullPointerException if any of the arguments are null
2771 */
2772 public boolean replace(K key, V oldValue, V newValue) {
2773 if (key == null || oldValue == null || newValue == null)
2774 throw new NullPointerException();
2775 return internalReplace(key, newValue, oldValue) != null;
2776 }
2777
2778 /**
2779 * {@inheritDoc}
2780 *
2781 * @return the previous value associated with the specified key,
2782 * or {@code null} if there was no mapping for the key
2783 * @throws NullPointerException if the specified key or value is null
2784 */
2785 public V replace(K key, V value) {
2786 if (key == null || value == null)
2787 throw new NullPointerException();
2788 return internalReplace(key, value, null);
2789 }
2790
    /**
     * Removes all of the mappings from this map.
     */
    public void clear() {
        internalClear();
    }
2797
2798 /**
2799 * Returns a {@link Set} view of the keys contained in this map.
2800 * The set is backed by the map, so changes to the map are
2801 * reflected in the set, and vice-versa.
2802 *
2803 * @return the set view
2804 */
2805 public KeySetView<K,V> keySet() {
2806 KeySetView<K,V> ks = keySet;
2807 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2808 }
2809
2810 /**
2811 * Returns a {@link Set} view of the keys in this map, using the
2812 * given common mapped value for any additions (i.e., {@link
2813 * Collection#add} and {@link Collection#addAll}). This is of
2814 * course only appropriate if it is acceptable to use the same
2815 * value for all additions from this view.
2816 *
2817 * @param mappedValue the mapped value to use for any
2818 * additions.
2819 * @return the set view
2820 * @throws NullPointerException if the mappedValue is null
2821 */
2822 public KeySetView<K,V> keySet(V mappedValue) {
2823 if (mappedValue == null)
2824 throw new NullPointerException();
2825 return new KeySetView<K,V>(this, mappedValue);
2826 }
2827
2828 /**
2829 * Returns a {@link Collection} view of the values contained in this map.
2830 * The collection is backed by the map, so changes to the map are
2831 * reflected in the collection, and vice-versa.
2832 */
2833 public ValuesView<K,V> values() {
2834 ValuesView<K,V> vs = values;
2835 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2836 }
2837
2838 /**
2839 * Returns a {@link Set} view of the mappings contained in this map.
2840 * The set is backed by the map, so changes to the map are
2841 * reflected in the set, and vice-versa. The set supports element
2842 * removal, which removes the corresponding mapping from the map,
2843 * via the {@code Iterator.remove}, {@code Set.remove},
2844 * {@code removeAll}, {@code retainAll}, and {@code clear}
2845 * operations. It does not support the {@code add} or
2846 * {@code addAll} operations.
2847 *
2848 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2849 * that will never throw {@link ConcurrentModificationException},
2850 * and guarantees to traverse elements as they existed upon
2851 * construction of the iterator, and may (but is not guaranteed to)
2852 * reflect any modifications subsequent to construction.
2853 */
2854 public Set<Map.Entry<K,V>> entrySet() {
2855 EntrySetView<K,V> es = entrySet;
2856 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2857 }
2858
2859 /**
2860 * Returns an enumeration of the keys in this table.
2861 *
2862 * @return an enumeration of the keys in this table
2863 * @see #keySet()
2864 */
    public Enumeration<K> keys() {
        // Legacy Hashtable-style enumeration, backed by a weakly-consistent
        // key traversal of the current table.
        return new KeyIterator<K,V>(this);
    }
2868
2869 /**
2870 * Returns an enumeration of the values in this table.
2871 *
2872 * @return an enumeration of the values in this table
2873 * @see #values()
2874 */
    public Enumeration<V> elements() {
        // Legacy Hashtable-style enumeration, backed by a weakly-consistent
        // value traversal of the current table.
        return new ValueIterator<K,V>(this);
    }
2878
2879 /**
2880 * Returns the hash code value for this {@link Map}, i.e.,
2881 * the sum of, for each key-value pair in the map,
2882 * {@code key.hashCode() ^ value.hashCode()}.
2883 *
2884 * @return the hash code value for this map
2885 */
2886 public int hashCode() {
2887 int h = 0;
2888 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2889 V v;
2890 while ((v = it.advance()) != null) {
2891 h += it.nextKey.hashCode() ^ v.hashCode();
2892 }
2893 return h;
2894 }
2895
2896 /**
2897 * Returns a string representation of this map. The string
2898 * representation consists of a list of key-value mappings (in no
2899 * particular order) enclosed in braces ("{@code {}}"). Adjacent
2900 * mappings are separated by the characters {@code ", "} (comma
2901 * and space). Each key-value mapping is rendered as the key
2902 * followed by an equals sign ("{@code =}") followed by the
2903 * associated value.
2904 *
2905 * @return a string representation of this map
2906 */
2907 public String toString() {
2908 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2909 StringBuilder sb = new StringBuilder();
2910 sb.append('{');
2911 V v;
2912 if ((v = it.advance()) != null) {
2913 for (;;) {
2914 Object k = it.nextKey;
2915 sb.append(k == this ? "(this Map)" : k);
2916 sb.append('=');
2917 sb.append(v == this ? "(this Map)" : v);
2918 if ((v = it.advance()) == null)
2919 break;
2920 sb.append(',').append(' ');
2921 }
2922 }
2923 return sb.append('}').toString();
2924 }
2925
2926 /**
2927 * Compares the specified object with this map for equality.
2928 * Returns {@code true} if the given object is a map with the same
2929 * mappings as this map. This operation may return misleading
2930 * results if either map is concurrently modified during execution
2931 * of this method.
2932 *
2933 * @param o object to be compared for equality with this map
2934 * @return {@code true} if the specified object is equal to this map
2935 */
    public boolean equals(Object o) {
        if (o != this) {
            if (!(o instanceof Map))
                return false;
            Map<?,?> m = (Map<?,?>) o;
            // Direction 1: every mapping in this map must also be in m.
            Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
            V val;
            while ((val = it.advance()) != null) {
                Object v = m.get(it.nextKey);
                if (v == null || (v != val && !v.equals(val)))
                    return false;
            }
            // Direction 2: every mapping in m must also be in this map.
            // Null keys/values in m can never match, since this map
            // permits neither.
            for (Map.Entry<?,?> e : m.entrySet()) {
                Object mk, mv, v;
                if ((mk = e.getKey()) == null ||
                    (mv = e.getValue()) == null ||
                    (v = internalGet(mk)) == null ||
                    (mv != v && !mv.equals(v)))
                    return false;
            }
        }
        return true;
    }
2959
2960 /* ----------------Iterators -------------- */
2961
2962 @SuppressWarnings("serial") static final class KeyIterator<K,V>
2963 extends Traverser<K,V,Object>
2964 implements Spliterator<K>, Iterator<K>, Enumeration<K> {
2965 KeyIterator(ConcurrentHashMap<K, V> map) { super(map); }
2966 KeyIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
2967 super(map, it);
2968 }
2969 public KeyIterator<K,V> trySplit() {
2970 if (tab != null && baseIndex == baseLimit)
2971 return null;
2972 return new KeyIterator<K,V>(map, this);
2973 }
2974 @SuppressWarnings("unchecked") public final K next() {
2975 if (nextVal == null && advance() == null)
2976 throw new NoSuchElementException();
2977 Object k = nextKey;
2978 nextVal = null;
2979 return (K) k;
2980 }
2981
2982 public final K nextElement() { return next(); }
2983
2984 public Iterator<K> iterator() { return this; }
2985
2986 public void forEach(Block<? super K> action) {
2987 if (action == null) throw new NullPointerException();
2988 while (advance() != null)
2989 action.accept((K)nextKey);
2990 }
2991 }
2992
2993 @SuppressWarnings("serial") static final class ValueIterator<K,V>
2994 extends Traverser<K,V,Object>
2995 implements Spliterator<V>, Iterator<V>, Enumeration<V> {
2996 ValueIterator(ConcurrentHashMap<K, V> map) { super(map); }
2997 ValueIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
2998 super(map, it);
2999 }
3000 public ValueIterator<K,V> trySplit() {
3001 if (tab != null && baseIndex == baseLimit)
3002 return null;
3003 return new ValueIterator<K,V>(map, this);
3004 }
3005
3006 public final V next() {
3007 V v;
3008 if ((v = nextVal) == null && (v = advance()) == null)
3009 throw new NoSuchElementException();
3010 nextVal = null;
3011 return v;
3012 }
3013
3014 public final V nextElement() { return next(); }
3015
3016 public Iterator<V> iterator() { return this; }
3017
3018 public void forEach(Block<? super V> action) {
3019 if (action == null) throw new NullPointerException();
3020 V v;
3021 while ((v = advance()) != null)
3022 action.accept(v);
3023 }
3024 }
3025
3026 @SuppressWarnings("serial") static final class EntryIterator<K,V>
3027 extends Traverser<K,V,Object>
3028 implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
3029 EntryIterator(ConcurrentHashMap<K, V> map) { super(map); }
3030 EntryIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3031 super(map, it);
3032 }
3033 public EntryIterator<K,V> trySplit() {
3034 if (tab != null && baseIndex == baseLimit)
3035 return null;
3036 return new EntryIterator<K,V>(map, this);
3037 }
3038
3039 @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
3040 V v;
3041 if ((v = nextVal) == null && (v = advance()) == null)
3042 throw new NoSuchElementException();
3043 Object k = nextKey;
3044 nextVal = null;
3045 return new MapEntry<K,V>((K)k, v, map);
3046 }
3047
3048 public Iterator<Map.Entry<K,V>> iterator() { return this; }
3049
3050 public void forEach(Block<? super Map.Entry<K,V>> action) {
3051 if (action == null) throw new NullPointerException();
3052 V v;
3053 while ((v = advance()) != null)
3054 action.accept(entryFor((K)nextKey, v));
3055 }
3056 }
3057
3058 /**
3059 * Exported Entry for iterators
3060 */
3061 static final class MapEntry<K,V> implements Map.Entry<K, V> {
3062 final K key; // non-null
3063 V val; // non-null
3064 final ConcurrentHashMap<K, V> map;
3065 MapEntry(K key, V val, ConcurrentHashMap<K, V> map) {
3066 this.key = key;
3067 this.val = val;
3068 this.map = map;
3069 }
3070 public final K getKey() { return key; }
3071 public final V getValue() { return val; }
3072 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3073 public final String toString(){ return key + "=" + val; }
3074
3075 public final boolean equals(Object o) {
3076 Object k, v; Map.Entry<?,?> e;
3077 return ((o instanceof Map.Entry) &&
3078 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3079 (v = e.getValue()) != null &&
3080 (k == key || k.equals(key)) &&
3081 (v == val || v.equals(val)));
3082 }
3083
3084 /**
3085 * Sets our entry's value and writes through to the map. The
3086 * value to return is somewhat arbitrary here. Since we do not
3087 * necessarily track asynchronous changes, the most recent
3088 * "previous" value could be different from what we return (or
3089 * could even have been removed in which case the put will
3090 * re-establish). We do not and cannot guarantee more.
3091 */
3092 public final V setValue(V value) {
3093 if (value == null) throw new NullPointerException();
3094 V v = val;
3095 val = value;
3096 map.put(key, value);
3097 return v;
3098 }
3099 }
3100
3101 /**
3102 * Returns exportable snapshot entry for the given key and value
3103 * when write-through can't or shouldn't be used.
3104 */
    static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
        // Detached snapshot: unlike MapEntry, changes via this entry's
        // setValue do not propagate back to the map.
        return new AbstractMap.SimpleEntry<K,V>(k, v);
    }
3108
3109 /* ---------------- Serialization Support -------------- */
3110
3111 /**
3112 * Stripped-down version of helper class used in previous version,
3113 * declared for the sake of serialization compatibility
3114 */
    static class Segment<K,V> implements Serializable {
        // UID fixed by the previous implementation's serial form; changing it
        // would break deserialization of older serialized maps.
        private static final long serialVersionUID = 2249069246763182397L;
        final float loadFactor; // retained only for serial-form compatibility
        Segment(float lf) { this.loadFactor = lf; }
    }
3120
3121 /**
3122 * Saves the state of the {@code ConcurrentHashMap} instance to a
3123 * stream (i.e., serializes it).
3124 * @param s the stream
3125 * @serialData
3126 * the key (Object) and value (Object)
3127 * for each key-value mapping, followed by a null pair.
3128 * The key-value mappings are emitted in no particular order.
3129 */
    @SuppressWarnings("unchecked") private void writeObject
        (java.io.ObjectOutputStream s)
        throws java.io.IOException {
        if (segments == null) { // for serialization compatibility
            // Emit a dummy segment array matching the legacy serial form.
            segments = (Segment<K,V>[])
                new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
            for (int i = 0; i < segments.length; ++i)
                segments[i] = new Segment<K,V>(LOAD_FACTOR);
        }
        s.defaultWriteObject();
        // Stream every live mapping as an alternating key/value pair.
        Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
        V v;
        while ((v = it.advance()) != null) {
            s.writeObject(it.nextKey);
            s.writeObject(v);
        }
        // A null/null pair terminates the mapping list (see @serialData).
        s.writeObject(null);
        s.writeObject(null);
        segments = null; // throw away
    }
3150
3151 /**
3152 * Reconstitutes the instance from a stream (that is, deserializes it).
3153 * @param s the stream
3154 */
    @SuppressWarnings("unchecked") private void readObject
        (java.io.ObjectInputStream s)
        throws java.io.IOException, ClassNotFoundException {
        s.defaultReadObject();
        this.segments = null; // unneeded

        // Create all nodes, then place in table once size is known
        long size = 0L;
        Node<V> p = null;   // head of a temporary linked list of all nodes
        for (;;) {
            K k = (K) s.readObject();
            V v = (V) s.readObject();
            if (k != null && v != null) {
                int h = spread(k.hashCode());
                p = new Node<V>(h, k, v, p);
                ++size;
            }
            else
                break;   // null/null pair terminates the stream (see writeObject)
        }
        if (p != null) {
            boolean init = false;
            int n;
            // Size the table for the element count, capped at the maximum.
            if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
                n = MAXIMUM_CAPACITY;
            else {
                int sz = (int)size;
                n = tableSizeFor(sz + (sz >>> 1) + 1);
            }
            int sc = sizeCtl;
            boolean collide = false;   // set if any bin gets hash-equal nodes
            // Claim initialization rights via CAS on sizeCtl (same protocol
            // as normal lazy table initialization).
            if (n > sc &&
                U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
                try {
                    if (table == null) {
                        init = true;
                        @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
                        Node<V>[] tab = (Node<V>[])rt;
                        int mask = n - 1;
                        // Move each node from the temporary list into its bin,
                        // pushing onto the bin's existing chain.
                        while (p != null) {
                            int j = p.hash & mask;
                            Node<V> next = p.next;
                            Node<V> q = p.next = tabAt(tab, j);
                            setTabAt(tab, j, p);
                            if (!collide && q != null && q.hash == p.hash)
                                collide = true;
                            p = next;
                        }
                        table = tab;
                        addCount(size, -1);
                        sc = n - (n >>> 2);   // next resize threshold (0.75 * n)
                    }
                } finally {
                    sizeCtl = sc;   // always release initialization lock
                }
                if (collide) { // rescan and convert to TreeBins
                    Node<V>[] tab = table;
                    for (int i = 0; i < tab.length; ++i) {
                        int c = 0;
                        for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
                            if (++c > TREE_THRESHOLD &&
                                (e.key instanceof Comparable)) {
                                replaceWithTreeBin(tab, i, e.key);
                                break;
                            }
                        }
                    }
                }
            }
            if (!init) { // Can only happen if unsafely published.
                // Fall back to ordinary puts for any nodes not placed above.
                while (p != null) {
                    internalPut((K)p.key, p.val, false);
                    p = p.next;
                }
            }
        }
    }
3232
3233 // -------------------------------------------------------
3234
3235 // Sequential bulk operations
3236
3237 /**
3238 * Performs the given action for each (key, value).
3239 *
3240 * @param action the action
3241 */
3242 @SuppressWarnings("unchecked") public void forEachSequentially
3243 (BiBlock<? super K, ? super V> action) {
3244 if (action == null) throw new NullPointerException();
3245 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3246 V v;
3247 while ((v = it.advance()) != null)
3248 action.accept((K)it.nextKey, v);
3249 }
3250
3251 /**
3252 * Performs the given action for each non-null transformation
3253 * of each (key, value).
3254 *
3255 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
3257 * which case the action is not applied).
3258 * @param action the action
3259 */
3260 @SuppressWarnings("unchecked") public <U> void forEachSequentially
3261 (BiFunction<? super K, ? super V, ? extends U> transformer,
3262 Block<? super U> action) {
3263 if (transformer == null || action == null)
3264 throw new NullPointerException();
3265 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3266 V v; U u;
3267 while ((v = it.advance()) != null) {
3268 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3269 action.accept(u);
3270 }
3271 }
3272
3273 /**
3274 * Returns a non-null result from applying the given search
3275 * function on each (key, value), or null if none.
3276 *
3277 * @param searchFunction a function returning a non-null
3278 * result on success, else null
3279 * @return a non-null result from applying the given search
3280 * function on each (key, value), or null if none
3281 */
3282 @SuppressWarnings("unchecked") public <U> U searchSequentially
3283 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3284 if (searchFunction == null) throw new NullPointerException();
3285 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3286 V v; U u;
3287 while ((v = it.advance()) != null) {
3288 if ((u = searchFunction.apply((K)it.nextKey, v)) != null)
3289 return u;
3290 }
3291 return null;
3292 }
3293
3294 /**
3295 * Returns the result of accumulating the given transformation
3296 * of all (key, value) pairs using the given reducer to
3297 * combine values, or null if none.
3298 *
3299 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
3301 * which case it is not combined).
3302 * @param reducer a commutative associative combining function
3303 * @return the result of accumulating the given transformation
3304 * of all (key, value) pairs
3305 */
3306 @SuppressWarnings("unchecked") public <U> U reduceSequentially
3307 (BiFunction<? super K, ? super V, ? extends U> transformer,
3308 BiFunction<? super U, ? super U, ? extends U> reducer) {
3309 if (transformer == null || reducer == null)
3310 throw new NullPointerException();
3311 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3312 U r = null, u; V v;
3313 while ((v = it.advance()) != null) {
3314 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3315 r = (r == null) ? u : reducer.apply(r, u);
3316 }
3317 return r;
3318 }
3319
3320 /**
3321 * Returns the result of accumulating the given transformation
3322 * of all (key, value) pairs using the given reducer to
3323 * combine values, and the given basis as an identity value.
3324 *
3325 * @param transformer a function returning the transformation
3326 * for an element
3327 * @param basis the identity (initial default value) for the reduction
3328 * @param reducer a commutative associative combining function
3329 * @return the result of accumulating the given transformation
3330 * of all (key, value) pairs
3331 */
3332 @SuppressWarnings("unchecked") public double reduceToDoubleSequentially
3333 (DoubleBiFunction<? super K, ? super V> transformer,
3334 double basis,
3335 DoubleBinaryOperator reducer) {
3336 if (transformer == null || reducer == null)
3337 throw new NullPointerException();
3338 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3339 double r = basis; V v;
3340 while ((v = it.advance()) != null)
3341 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey, v));
3342 return r;
3343 }
3344
3345 /**
3346 * Returns the result of accumulating the given transformation
3347 * of all (key, value) pairs using the given reducer to
3348 * combine values, and the given basis as an identity value.
3349 *
3350 * @param transformer a function returning the transformation
3351 * for an element
3352 * @param basis the identity (initial default value) for the reduction
3353 * @param reducer a commutative associative combining function
3354 * @return the result of accumulating the given transformation
3355 * of all (key, value) pairs
3356 */
3357 @SuppressWarnings("unchecked") public long reduceToLongSequentially
3358 (LongBiFunction<? super K, ? super V> transformer,
3359 long basis,
3360 LongBinaryOperator reducer) {
3361 if (transformer == null || reducer == null)
3362 throw new NullPointerException();
3363 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3364 long r = basis; V v;
3365 while ((v = it.advance()) != null)
3366 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey, v));
3367 return r;
3368 }
3369
3370 /**
3371 * Returns the result of accumulating the given transformation
3372 * of all (key, value) pairs using the given reducer to
3373 * combine values, and the given basis as an identity value.
3374 *
3375 * @param transformer a function returning the transformation
3376 * for an element
3377 * @param basis the identity (initial default value) for the reduction
3378 * @param reducer a commutative associative combining function
3379 * @return the result of accumulating the given transformation
3380 * of all (key, value) pairs
3381 */
3382 @SuppressWarnings("unchecked") public int reduceToIntSequentially
3383 (IntBiFunction<? super K, ? super V> transformer,
3384 int basis,
3385 IntBinaryOperator reducer) {
3386 if (transformer == null || reducer == null)
3387 throw new NullPointerException();
3388 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3389 int r = basis; V v;
3390 while ((v = it.advance()) != null)
3391 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey, v));
3392 return r;
3393 }
3394
3395 /**
3396 * Performs the given action for each key.
3397 *
3398 * @param action the action
3399 */
3400 @SuppressWarnings("unchecked") public void forEachKeySequentially
3401 (Block<? super K> action) {
3402 if (action == null) throw new NullPointerException();
3403 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3404 while (it.advance() != null)
3405 action.accept((K)it.nextKey);
3406 }
3407
3408 /**
3409 * Performs the given action for each non-null transformation
3410 * of each key.
3411 *
3412 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
3414 * which case the action is not applied).
3415 * @param action the action
3416 */
3417 @SuppressWarnings("unchecked") public <U> void forEachKeySequentially
3418 (Function<? super K, ? extends U> transformer,
3419 Block<? super U> action) {
3420 if (transformer == null || action == null)
3421 throw new NullPointerException();
3422 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3423 U u;
3424 while (it.advance() != null) {
3425 if ((u = transformer.apply((K)it.nextKey)) != null)
3426 action.accept(u);
3427 }
3428 ForkJoinTasks.forEachKey
3429 (this, transformer, action).invoke();
3430 }
3431
3432 /**
3433 * Returns a non-null result from applying the given search
3434 * function on each key, or null if none.
3435 *
3436 * @param searchFunction a function returning a non-null
3437 * result on success, else null
3438 * @return a non-null result from applying the given search
3439 * function on each key, or null if none
3440 */
3441 @SuppressWarnings("unchecked") public <U> U searchKeysSequentially
3442 (Function<? super K, ? extends U> searchFunction) {
3443 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3444 U u;
3445 while (it.advance() != null) {
3446 if ((u = searchFunction.apply((K)it.nextKey)) != null)
3447 return u;
3448 }
3449 return null;
3450 }
3451
3452 /**
3453 * Returns the result of accumulating all keys using the given
3454 * reducer to combine values, or null if none.
3455 *
3456 * @param reducer a commutative associative combining function
3457 * @return the result of accumulating all keys using the given
3458 * reducer to combine values, or null if none
3459 */
3460 @SuppressWarnings("unchecked") public K reduceKeysSequentially
3461 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3462 if (reducer == null) throw new NullPointerException();
3463 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3464 K r = null;
3465 while (it.advance() != null) {
3466 K u = (K)it.nextKey;
3467 r = (r == null) ? u : reducer.apply(r, u);
3468 }
3469 return r;
3470 }
3471
3472 /**
3473 * Returns the result of accumulating the given transformation
3474 * of all keys using the given reducer to combine values, or
3475 * null if none.
3476 *
3477 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
3479 * which case it is not combined).
3480 * @param reducer a commutative associative combining function
3481 * @return the result of accumulating the given transformation
3482 * of all keys
3483 */
3484 @SuppressWarnings("unchecked") public <U> U reduceKeysSequentially
3485 (Function<? super K, ? extends U> transformer,
3486 BiFunction<? super U, ? super U, ? extends U> reducer) {
3487 if (transformer == null || reducer == null)
3488 throw new NullPointerException();
3489 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3490 U r = null, u;
3491 while (it.advance() != null) {
3492 if ((u = transformer.apply((K)it.nextKey)) != null)
3493 r = (r == null) ? u : reducer.apply(r, u);
3494 }
3495 return r;
3496 }
3497
3498 /**
3499 * Returns the result of accumulating the given transformation
3500 * of all keys using the given reducer to combine values, and
3501 * the given basis as an identity value.
3502 *
3503 * @param transformer a function returning the transformation
3504 * for an element
3505 * @param basis the identity (initial default value) for the reduction
3506 * @param reducer a commutative associative combining function
3507 * @return the result of accumulating the given transformation
3508 * of all keys
3509 */
3510 @SuppressWarnings("unchecked") public double reduceKeysToDoubleSequentially
3511 (DoubleFunction<? super K> transformer,
3512 double basis,
3513 DoubleBinaryOperator reducer) {
3514 if (transformer == null || reducer == null)
3515 throw new NullPointerException();
3516 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3517 double r = basis;
3518 while (it.advance() != null)
3519 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey));
3520 return r;
3521 }
3522
3523 /**
3524 * Returns the result of accumulating the given transformation
3525 * of all keys using the given reducer to combine values, and
3526 * the given basis as an identity value.
3527 *
3528 * @param transformer a function returning the transformation
3529 * for an element
3530 * @param basis the identity (initial default value) for the reduction
3531 * @param reducer a commutative associative combining function
3532 * @return the result of accumulating the given transformation
3533 * of all keys
3534 */
3535 @SuppressWarnings("unchecked") public long reduceKeysToLongSequentially
3536 (LongFunction<? super K> transformer,
3537 long basis,
3538 LongBinaryOperator reducer) {
3539 if (transformer == null || reducer == null)
3540 throw new NullPointerException();
3541 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3542 long r = basis;
3543 while (it.advance() != null)
3544 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey));
3545 return r;
3546 }
3547
3548 /**
3549 * Returns the result of accumulating the given transformation
3550 * of all keys using the given reducer to combine values, and
3551 * the given basis as an identity value.
3552 *
3553 * @param transformer a function returning the transformation
3554 * for an element
3555 * @param basis the identity (initial default value) for the reduction
3556 * @param reducer a commutative associative combining function
3557 * @return the result of accumulating the given transformation
3558 * of all keys
3559 */
3560 @SuppressWarnings("unchecked") public int reduceKeysToIntSequentially
3561 (IntFunction<? super K> transformer,
3562 int basis,
3563 IntBinaryOperator reducer) {
3564 if (transformer == null || reducer == null)
3565 throw new NullPointerException();
3566 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3567 int r = basis;
3568 while (it.advance() != null)
3569 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey));
3570 return r;
3571 }
3572
3573 /**
3574 * Performs the given action for each value.
3575 *
3576 * @param action the action
3577 */
3578 public void forEachValueSequentially(Block<? super V> action) {
3579 if (action == null) throw new NullPointerException();
3580 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3581 V v;
3582 while ((v = it.advance()) != null)
3583 action.accept(v);
3584 }
3585
3586 /**
3587 * Performs the given action for each non-null transformation
3588 * of each value.
3589 *
3590 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
     * which case the action is not applied).
     * @param action the action
     */
3594 public <U> void forEachValueSequentially
3595 (Function<? super V, ? extends U> transformer,
3596 Block<? super U> action) {
3597 if (transformer == null || action == null)
3598 throw new NullPointerException();
3599 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3600 V v; U u;
3601 while ((v = it.advance()) != null) {
3602 if ((u = transformer.apply(v)) != null)
3603 action.accept(u);
3604 }
3605 }
3606
3607 /**
3608 * Returns a non-null result from applying the given search
3609 * function on each value, or null if none.
3610 *
3611 * @param searchFunction a function returning a non-null
3612 * result on success, else null
3613 * @return a non-null result from applying the given search
3614 * function on each value, or null if none
3615 *
3616 */
3617 public <U> U searchValuesSequentially
3618 (Function<? super V, ? extends U> searchFunction) {
3619 if (searchFunction == null) throw new NullPointerException();
3620 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3621 V v; U u;
3622 while ((v = it.advance()) != null) {
3623 if ((u = searchFunction.apply(v)) != null)
3624 return u;
3625 }
3626 return null;
3627 }
3628
3629 /**
3630 * Returns the result of accumulating all values using the
3631 * given reducer to combine values, or null if none.
3632 *
3633 * @param reducer a commutative associative combining function
3634 * @return the result of accumulating all values
3635 */
3636 public V reduceValuesSequentially
3637 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3638 if (reducer == null) throw new NullPointerException();
3639 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3640 V r = null; V v;
3641 while ((v = it.advance()) != null)
3642 r = (r == null) ? v : reducer.apply(r, v);
3643 return r;
3644 }
3645
3646 /**
3647 * Returns the result of accumulating the given transformation
3648 * of all values using the given reducer to combine values, or
3649 * null if none.
3650 *
3651 * @param transformer a function returning the transformation
     * for an element, or null if there is no transformation (in
3653 * which case it is not combined).
3654 * @param reducer a commutative associative combining function
3655 * @return the result of accumulating the given transformation
3656 * of all values
3657 */
3658 public <U> U reduceValuesSequentially
3659 (Function<? super V, ? extends U> transformer,
3660 BiFunction<? super U, ? super U, ? extends U> reducer) {
3661 if (transformer == null || reducer == null)
3662 throw new NullPointerException();
3663 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3664 U r = null, u; V v;
3665 while ((v = it.advance()) != null) {
3666 if ((u = transformer.apply(v)) != null)
3667 r = (r == null) ? u : reducer.apply(r, u);
3668 }
3669 return r;
3670 }
3671
3672 /**
3673 * Returns the result of accumulating the given transformation
3674 * of all values using the given reducer to combine values,
3675 * and the given basis as an identity value.
3676 *
3677 * @param transformer a function returning the transformation
3678 * for an element
3679 * @param basis the identity (initial default value) for the reduction
3680 * @param reducer a commutative associative combining function
3681 * @return the result of accumulating the given transformation
3682 * of all values
3683 */
3684 public double reduceValuesToDoubleSequentially
3685 (DoubleFunction<? super V> transformer,
3686 double basis,
3687 DoubleBinaryOperator reducer) {
3688 if (transformer == null || reducer == null)
3689 throw new NullPointerException();
3690 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3691 double r = basis; V v;
3692 while ((v = it.advance()) != null)
3693 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3694 return r;
3695 }
3696
3697 /**
3698 * Returns the result of accumulating the given transformation
3699 * of all values using the given reducer to combine values,
3700 * and the given basis as an identity value.
3701 *
3702 * @param transformer a function returning the transformation
3703 * for an element
3704 * @param basis the identity (initial default value) for the reduction
3705 * @param reducer a commutative associative combining function
3706 * @return the result of accumulating the given transformation
3707 * of all values
3708 */
3709 public long reduceValuesToLongSequentially
3710 (LongFunction<? super V> transformer,
3711 long basis,
3712 LongBinaryOperator reducer) {
3713 if (transformer == null || reducer == null)
3714 throw new NullPointerException();
3715 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3716 long r = basis; V v;
3717 while ((v = it.advance()) != null)
3718 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3719 return r;
3720 }
3721
3722 /**
3723 * Returns the result of accumulating the given transformation
3724 * of all values using the given reducer to combine values,
3725 * and the given basis as an identity value.
3726 *
3727 * @param transformer a function returning the transformation
3728 * for an element
3729 * @param basis the identity (initial default value) for the reduction
3730 * @param reducer a commutative associative combining function
3731 * @return the result of accumulating the given transformation
3732 * of all values
3733 */
3734 public int reduceValuesToIntSequentially
3735 (IntFunction<? super V> transformer,
3736 int basis,
3737 IntBinaryOperator reducer) {
3738 if (transformer == null || reducer == null)
3739 throw new NullPointerException();
3740 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3741 int r = basis; V v;
3742 while ((v = it.advance()) != null)
3743 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3744 return r;
3745 }
3746
3747 /**
3748 * Performs the given action for each entry.
3749 *
3750 * @param action the action
3751 */
3752 @SuppressWarnings("unchecked") public void forEachEntrySequentially
3753 (Block<? super Map.Entry<K,V>> action) {
3754 if (action == null) throw new NullPointerException();
3755 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3756 V v;
3757 while ((v = it.advance()) != null)
3758 action.accept(entryFor((K)it.nextKey, v));
3759 }
3760
3761 /**
3762 * Performs the given action for each non-null transformation
3763 * of each entry.
3764 *
3765 * @param transformer a function returning the transformation
3766 * for an element, or null of there is no transformation (in
3767 * which case the action is not applied).
3768 * @param action the action
3769 */
3770 @SuppressWarnings("unchecked") public <U> void forEachEntrySequentially
3771 (Function<Map.Entry<K,V>, ? extends U> transformer,
3772 Block<? super U> action) {
3773 if (transformer == null || action == null)
3774 throw new NullPointerException();
3775 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3776 V v; U u;
3777 while ((v = it.advance()) != null) {
3778 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3779 action.accept(u);
3780 }
3781 }
3782
3783 /**
3784 * Returns a non-null result from applying the given search
3785 * function on each entry, or null if none.
3786 *
3787 * @param searchFunction a function returning a non-null
3788 * result on success, else null
3789 * @return a non-null result from applying the given search
3790 * function on each entry, or null if none
3791 */
3792 @SuppressWarnings("unchecked") public <U> U searchEntriesSequentially
3793 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3794 if (searchFunction == null) throw new NullPointerException();
3795 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3796 V v; U u;
3797 while ((v = it.advance()) != null) {
3798 if ((u = searchFunction.apply(entryFor((K)it.nextKey, v))) != null)
3799 return u;
3800 }
3801 return null;
3802 }
3803
3804 /**
3805 * Returns the result of accumulating all entries using the
3806 * given reducer to combine values, or null if none.
3807 *
3808 * @param reducer a commutative associative combining function
3809 * @return the result of accumulating all entries
3810 */
3811 @SuppressWarnings("unchecked") public Map.Entry<K,V> reduceEntriesSequentially
3812 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3813 if (reducer == null) throw new NullPointerException();
3814 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3815 Map.Entry<K,V> r = null; V v;
3816 while ((v = it.advance()) != null) {
3817 Map.Entry<K,V> u = entryFor((K)it.nextKey, v);
3818 r = (r == null) ? u : reducer.apply(r, u);
3819 }
3820 return r;
3821 }
3822
3823 /**
3824 * Returns the result of accumulating the given transformation
3825 * of all entries using the given reducer to combine values,
3826 * or null if none.
3827 *
3828 * @param transformer a function returning the transformation
3829 * for an element, or null of there is no transformation (in
3830 * which case it is not combined).
3831 * @param reducer a commutative associative combining function
3832 * @return the result of accumulating the given transformation
3833 * of all entries
3834 */
3835 @SuppressWarnings("unchecked") public <U> U reduceEntriesSequentially
3836 (Function<Map.Entry<K,V>, ? extends U> transformer,
3837 BiFunction<? super U, ? super U, ? extends U> reducer) {
3838 if (transformer == null || reducer == null)
3839 throw new NullPointerException();
3840 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3841 U r = null, u; V v;
3842 while ((v = it.advance()) != null) {
3843 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3844 r = (r == null) ? u : reducer.apply(r, u);
3845 }
3846 return r;
3847 }
3848
3849 /**
3850 * Returns the result of accumulating the given transformation
3851 * of all entries using the given reducer to combine values,
3852 * and the given basis as an identity value.
3853 *
3854 * @param transformer a function returning the transformation
3855 * for an element
3856 * @param basis the identity (initial default value) for the reduction
3857 * @param reducer a commutative associative combining function
3858 * @return the result of accumulating the given transformation
3859 * of all entries
3860 */
3861 @SuppressWarnings("unchecked") public double reduceEntriesToDoubleSequentially
3862 (DoubleFunction<Map.Entry<K,V>> transformer,
3863 double basis,
3864 DoubleBinaryOperator reducer) {
3865 if (transformer == null || reducer == null)
3866 throw new NullPointerException();
3867 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3868 double r = basis; V v;
3869 while ((v = it.advance()) != null)
3870 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)it.nextKey, v)));
3871 return r;
3872 }
3873
3874 /**
3875 * Returns the result of accumulating the given transformation
3876 * of all entries using the given reducer to combine values,
3877 * and the given basis as an identity value.
3878 *
3879 * @param transformer a function returning the transformation
3880 * for an element
3881 * @param basis the identity (initial default value) for the reduction
3882 * @param reducer a commutative associative combining function
3883 * @return the result of accumulating the given transformation
3884 * of all entries
3885 */
3886 @SuppressWarnings("unchecked") public long reduceEntriesToLongSequentially
3887 (LongFunction<Map.Entry<K,V>> transformer,
3888 long basis,
3889 LongBinaryOperator reducer) {
3890 if (transformer == null || reducer == null)
3891 throw new NullPointerException();
3892 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3893 long r = basis; V v;
3894 while ((v = it.advance()) != null)
3895 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)it.nextKey, v)));
3896 return r;
3897 }
3898
3899 /**
3900 * Returns the result of accumulating the given transformation
3901 * of all entries using the given reducer to combine values,
3902 * and the given basis as an identity value.
3903 *
3904 * @param transformer a function returning the transformation
3905 * for an element
3906 * @param basis the identity (initial default value) for the reduction
3907 * @param reducer a commutative associative combining function
3908 * @return the result of accumulating the given transformation
3909 * of all entries
3910 */
3911 @SuppressWarnings("unchecked") public int reduceEntriesToIntSequentially
3912 (IntFunction<Map.Entry<K,V>> transformer,
3913 int basis,
3914 IntBinaryOperator reducer) {
3915 if (transformer == null || reducer == null)
3916 throw new NullPointerException();
3917 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3918 int r = basis; V v;
3919 while ((v = it.advance()) != null)
3920 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)it.nextKey, v)));
3921 return r;
3922 }
3923
3924 // Parallel bulk operations
3925
3926 /**
3927 * Performs the given action for each (key, value).
3928 *
3929 * @param action the action
3930 */
3931 public void forEachInParallel(BiBlock<? super K,? super V> action) {
3932 ForkJoinTasks.forEach
3933 (this, action).invoke();
3934 }
3935
3936 /**
3937 * Performs the given action for each non-null transformation
3938 * of each (key, value).
3939 *
3940 * @param transformer a function returning the transformation
3941 * for an element, or null of there is no transformation (in
3942 * which case the action is not applied).
3943 * @param action the action
3944 */
3945 public <U> void forEachInParallel
3946 (BiFunction<? super K, ? super V, ? extends U> transformer,
3947 Block<? super U> action) {
3948 ForkJoinTasks.forEach
3949 (this, transformer, action).invoke();
3950 }
3951
3952 /**
3953 * Returns a non-null result from applying the given search
3954 * function on each (key, value), or null if none. Upon
3955 * success, further element processing is suppressed and the
3956 * results of any other parallel invocations of the search
3957 * function are ignored.
3958 *
3959 * @param searchFunction a function returning a non-null
3960 * result on success, else null
3961 * @return a non-null result from applying the given search
3962 * function on each (key, value), or null if none
3963 */
3964 public <U> U searchInParallel
3965 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3966 return ForkJoinTasks.search
3967 (this, searchFunction).invoke();
3968 }
3969
3970 /**
3971 * Returns the result of accumulating the given transformation
3972 * of all (key, value) pairs using the given reducer to
3973 * combine values, or null if none.
3974 *
3975 * @param transformer a function returning the transformation
3976 * for an element, or null of there is no transformation (in
3977 * which case it is not combined).
3978 * @param reducer a commutative associative combining function
3979 * @return the result of accumulating the given transformation
3980 * of all (key, value) pairs
3981 */
3982 public <U> U reduceInParallel
3983 (BiFunction<? super K, ? super V, ? extends U> transformer,
3984 BiFunction<? super U, ? super U, ? extends U> reducer) {
3985 return ForkJoinTasks.reduce
3986 (this, transformer, reducer).invoke();
3987 }
3988
3989 /**
3990 * Returns the result of accumulating the given transformation
3991 * of all (key, value) pairs using the given reducer to
3992 * combine values, and the given basis as an identity value.
3993 *
3994 * @param transformer a function returning the transformation
3995 * for an element
3996 * @param basis the identity (initial default value) for the reduction
3997 * @param reducer a commutative associative combining function
3998 * @return the result of accumulating the given transformation
3999 * of all (key, value) pairs
4000 */
4001 public double reduceToDoubleInParallel
4002 (DoubleBiFunction<? super K, ? super V> transformer,
4003 double basis,
4004 DoubleBinaryOperator reducer) {
4005 return ForkJoinTasks.reduceToDouble
4006 (this, transformer, basis, reducer).invoke();
4007 }
4008
4009 /**
4010 * Returns the result of accumulating the given transformation
4011 * of all (key, value) pairs using the given reducer to
4012 * combine values, and the given basis as an identity value.
4013 *
4014 * @param transformer a function returning the transformation
4015 * for an element
4016 * @param basis the identity (initial default value) for the reduction
4017 * @param reducer a commutative associative combining function
4018 * @return the result of accumulating the given transformation
4019 * of all (key, value) pairs
4020 */
4021 public long reduceToLongInParallel
4022 (LongBiFunction<? super K, ? super V> transformer,
4023 long basis,
4024 LongBinaryOperator reducer) {
4025 return ForkJoinTasks.reduceToLong
4026 (this, transformer, basis, reducer).invoke();
4027 }
4028
4029 /**
4030 * Returns the result of accumulating the given transformation
4031 * of all (key, value) pairs using the given reducer to
4032 * combine values, and the given basis as an identity value.
4033 *
4034 * @param transformer a function returning the transformation
4035 * for an element
4036 * @param basis the identity (initial default value) for the reduction
4037 * @param reducer a commutative associative combining function
4038 * @return the result of accumulating the given transformation
4039 * of all (key, value) pairs
4040 */
4041 public int reduceToIntInParallel
4042 (IntBiFunction<? super K, ? super V> transformer,
4043 int basis,
4044 IntBinaryOperator reducer) {
4045 return ForkJoinTasks.reduceToInt
4046 (this, transformer, basis, reducer).invoke();
4047 }
4048
4049 /**
4050 * Performs the given action for each key.
4051 *
4052 * @param action the action
4053 */
4054 public void forEachKeyInParallel(Block<? super K> action) {
4055 ForkJoinTasks.forEachKey
4056 (this, action).invoke();
4057 }
4058
4059 /**
4060 * Performs the given action for each non-null transformation
4061 * of each key.
4062 *
4063 * @param transformer a function returning the transformation
4064 * for an element, or null of there is no transformation (in
4065 * which case the action is not applied).
4066 * @param action the action
4067 */
4068 public <U> void forEachKeyInParallel
4069 (Function<? super K, ? extends U> transformer,
4070 Block<? super U> action) {
4071 ForkJoinTasks.forEachKey
4072 (this, transformer, action).invoke();
4073 }
4074
4075 /**
4076 * Returns a non-null result from applying the given search
4077 * function on each key, or null if none. Upon success,
4078 * further element processing is suppressed and the results of
4079 * any other parallel invocations of the search function are
4080 * ignored.
4081 *
4082 * @param searchFunction a function returning a non-null
4083 * result on success, else null
4084 * @return a non-null result from applying the given search
4085 * function on each key, or null if none
4086 */
4087 public <U> U searchKeysInParallel
4088 (Function<? super K, ? extends U> searchFunction) {
4089 return ForkJoinTasks.searchKeys
4090 (this, searchFunction).invoke();
4091 }
4092
4093 /**
4094 * Returns the result of accumulating all keys using the given
4095 * reducer to combine values, or null if none.
4096 *
4097 * @param reducer a commutative associative combining function
4098 * @return the result of accumulating all keys using the given
4099 * reducer to combine values, or null if none
4100 */
4101 public K reduceKeysInParallel
4102 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4103 return ForkJoinTasks.reduceKeys
4104 (this, reducer).invoke();
4105 }
4106
4107 /**
4108 * Returns the result of accumulating the given transformation
4109 * of all keys using the given reducer to combine values, or
4110 * null if none.
4111 *
4112 * @param transformer a function returning the transformation
4113 * for an element, or null of there is no transformation (in
4114 * which case it is not combined).
4115 * @param reducer a commutative associative combining function
4116 * @return the result of accumulating the given transformation
4117 * of all keys
4118 */
4119 public <U> U reduceKeysInParallel
4120 (Function<? super K, ? extends U> transformer,
4121 BiFunction<? super U, ? super U, ? extends U> reducer) {
4122 return ForkJoinTasks.reduceKeys
4123 (this, transformer, reducer).invoke();
4124 }
4125
4126 /**
4127 * Returns the result of accumulating the given transformation
4128 * of all keys using the given reducer to combine values, and
4129 * the given basis as an identity value.
4130 *
4131 * @param transformer a function returning the transformation
4132 * for an element
4133 * @param basis the identity (initial default value) for the reduction
4134 * @param reducer a commutative associative combining function
4135 * @return the result of accumulating the given transformation
4136 * of all keys
4137 */
4138 public double reduceKeysToDoubleInParallel
4139 (DoubleFunction<? super K> transformer,
4140 double basis,
4141 DoubleBinaryOperator reducer) {
4142 return ForkJoinTasks.reduceKeysToDouble
4143 (this, transformer, basis, reducer).invoke();
4144 }
4145
4146 /**
4147 * Returns the result of accumulating the given transformation
4148 * of all keys using the given reducer to combine values, and
4149 * the given basis as an identity value.
4150 *
4151 * @param transformer a function returning the transformation
4152 * for an element
4153 * @param basis the identity (initial default value) for the reduction
4154 * @param reducer a commutative associative combining function
4155 * @return the result of accumulating the given transformation
4156 * of all keys
4157 */
4158 public long reduceKeysToLongInParallel
4159 (LongFunction<? super K> transformer,
4160 long basis,
4161 LongBinaryOperator reducer) {
4162 return ForkJoinTasks.reduceKeysToLong
4163 (this, transformer, basis, reducer).invoke();
4164 }
4165
4166 /**
4167 * Returns the result of accumulating the given transformation
4168 * of all keys using the given reducer to combine values, and
4169 * the given basis as an identity value.
4170 *
4171 * @param transformer a function returning the transformation
4172 * for an element
4173 * @param basis the identity (initial default value) for the reduction
4174 * @param reducer a commutative associative combining function
4175 * @return the result of accumulating the given transformation
4176 * of all keys
4177 */
4178 public int reduceKeysToIntInParallel
4179 (IntFunction<? super K> transformer,
4180 int basis,
4181 IntBinaryOperator reducer) {
4182 return ForkJoinTasks.reduceKeysToInt
4183 (this, transformer, basis, reducer).invoke();
4184 }
4185
4186 /**
4187 * Performs the given action for each value.
4188 *
4189 * @param action the action
4190 */
4191 public void forEachValueInParallel(Block<? super V> action) {
4192 ForkJoinTasks.forEachValue
4193 (this, action).invoke();
4194 }
4195
4196 /**
4197 * Performs the given action for each non-null transformation
4198 * of each value.
4199 *
4200 * @param transformer a function returning the transformation
4201 * for an element, or null of there is no transformation (in
4202 * which case the action is not applied).
4203 */
4204 public <U> void forEachValueInParallel
4205 (Function<? super V, ? extends U> transformer,
4206 Block<? super U> action) {
4207 ForkJoinTasks.forEachValue
4208 (this, transformer, action).invoke();
4209 }
4210
4211 /**
4212 * Returns a non-null result from applying the given search
4213 * function on each value, or null if none. Upon success,
4214 * further element processing is suppressed and the results of
4215 * any other parallel invocations of the search function are
4216 * ignored.
4217 *
4218 * @param searchFunction a function returning a non-null
4219 * result on success, else null
4220 * @return a non-null result from applying the given search
4221 * function on each value, or null if none
4222 *
4223 */
4224 public <U> U searchValuesInParallel
4225 (Function<? super V, ? extends U> searchFunction) {
4226 return ForkJoinTasks.searchValues
4227 (this, searchFunction).invoke();
4228 }
4229
4230 /**
4231 * Returns the result of accumulating all values using the
4232 * given reducer to combine values, or null if none.
4233 *
4234 * @param reducer a commutative associative combining function
4235 * @return the result of accumulating all values
4236 */
4237 public V reduceValuesInParallel
4238 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4239 return ForkJoinTasks.reduceValues
4240 (this, reducer).invoke();
4241 }
4242
4243 /**
4244 * Returns the result of accumulating the given transformation
4245 * of all values using the given reducer to combine values, or
4246 * null if none.
4247 *
4248 * @param transformer a function returning the transformation
4249 * for an element, or null of there is no transformation (in
4250 * which case it is not combined).
4251 * @param reducer a commutative associative combining function
4252 * @return the result of accumulating the given transformation
4253 * of all values
4254 */
4255 public <U> U reduceValuesInParallel
4256 (Function<? super V, ? extends U> transformer,
4257 BiFunction<? super U, ? super U, ? extends U> reducer) {
4258 return ForkJoinTasks.reduceValues
4259 (this, transformer, reducer).invoke();
4260 }
4261
4262 /**
4263 * Returns the result of accumulating the given transformation
4264 * of all values using the given reducer to combine values,
4265 * and the given basis as an identity value.
4266 *
4267 * @param transformer a function returning the transformation
4268 * for an element
4269 * @param basis the identity (initial default value) for the reduction
4270 * @param reducer a commutative associative combining function
4271 * @return the result of accumulating the given transformation
4272 * of all values
4273 */
4274 public double reduceValuesToDoubleInParallel
4275 (DoubleFunction<? super V> transformer,
4276 double basis,
4277 DoubleBinaryOperator reducer) {
4278 return ForkJoinTasks.reduceValuesToDouble
4279 (this, transformer, basis, reducer).invoke();
4280 }
4281
4282 /**
4283 * Returns the result of accumulating the given transformation
4284 * of all values using the given reducer to combine values,
4285 * and the given basis as an identity value.
4286 *
4287 * @param transformer a function returning the transformation
4288 * for an element
4289 * @param basis the identity (initial default value) for the reduction
4290 * @param reducer a commutative associative combining function
4291 * @return the result of accumulating the given transformation
4292 * of all values
4293 */
4294 public long reduceValuesToLongInParallel
4295 (LongFunction<? super V> transformer,
4296 long basis,
4297 LongBinaryOperator reducer) {
4298 return ForkJoinTasks.reduceValuesToLong
4299 (this, transformer, basis, reducer).invoke();
4300 }
4301
4302 /**
4303 * Returns the result of accumulating the given transformation
4304 * of all values using the given reducer to combine values,
4305 * and the given basis as an identity value.
4306 *
4307 * @param transformer a function returning the transformation
4308 * for an element
4309 * @param basis the identity (initial default value) for the reduction
4310 * @param reducer a commutative associative combining function
4311 * @return the result of accumulating the given transformation
4312 * of all values
4313 */
4314 public int reduceValuesToIntInParallel
4315 (IntFunction<? super V> transformer,
4316 int basis,
4317 IntBinaryOperator reducer) {
4318 return ForkJoinTasks.reduceValuesToInt
4319 (this, transformer, basis, reducer).invoke();
4320 }
4321
4322 /**
4323 * Performs the given action for each entry.
4324 *
4325 * @param action the action
4326 */
4327 public void forEachEntryInParallel(Block<? super Map.Entry<K,V>> action) {
4328 ForkJoinTasks.forEachEntry
4329 (this, action).invoke();
4330 }
4331
4332 /**
4333 * Performs the given action for each non-null transformation
4334 * of each entry.
4335 *
4336 * @param transformer a function returning the transformation
4337 * for an element, or null of there is no transformation (in
4338 * which case the action is not applied).
4339 * @param action the action
4340 */
4341 public <U> void forEachEntryInParallel
4342 (Function<Map.Entry<K,V>, ? extends U> transformer,
4343 Block<? super U> action) {
4344 ForkJoinTasks.forEachEntry
4345 (this, transformer, action).invoke();
4346 }
4347
4348 /**
4349 * Returns a non-null result from applying the given search
4350 * function on each entry, or null if none. Upon success,
4351 * further element processing is suppressed and the results of
4352 * any other parallel invocations of the search function are
4353 * ignored.
4354 *
4355 * @param searchFunction a function returning a non-null
4356 * result on success, else null
4357 * @return a non-null result from applying the given search
4358 * function on each entry, or null if none
4359 */
4360 public <U> U searchEntriesInParallel
4361 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4362 return ForkJoinTasks.searchEntries
4363 (this, searchFunction).invoke();
4364 }
4365
4366 /**
4367 * Returns the result of accumulating all entries using the
4368 * given reducer to combine values, or null if none.
4369 *
4370 * @param reducer a commutative associative combining function
4371 * @return the result of accumulating all entries
4372 */
4373 public Map.Entry<K,V> reduceEntriesInParallel
4374 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4375 return ForkJoinTasks.reduceEntries
4376 (this, reducer).invoke();
4377 }
4378
4379 /**
4380 * Returns the result of accumulating the given transformation
4381 * of all entries using the given reducer to combine values,
4382 * or null if none.
4383 *
4384 * @param transformer a function returning the transformation
4385 * for an element, or null of there is no transformation (in
4386 * which case it is not combined).
4387 * @param reducer a commutative associative combining function
4388 * @return the result of accumulating the given transformation
4389 * of all entries
4390 */
4391 public <U> U reduceEntriesInParallel
4392 (Function<Map.Entry<K,V>, ? extends U> transformer,
4393 BiFunction<? super U, ? super U, ? extends U> reducer) {
4394 return ForkJoinTasks.reduceEntries
4395 (this, transformer, reducer).invoke();
4396 }
4397
4398 /**
4399 * Returns the result of accumulating the given transformation
4400 * of all entries using the given reducer to combine values,
4401 * and the given basis as an identity value.
4402 *
4403 * @param transformer a function returning the transformation
4404 * for an element
4405 * @param basis the identity (initial default value) for the reduction
4406 * @param reducer a commutative associative combining function
4407 * @return the result of accumulating the given transformation
4408 * of all entries
4409 */
4410 public double reduceEntriesToDoubleInParallel
4411 (DoubleFunction<Map.Entry<K,V>> transformer,
4412 double basis,
4413 DoubleBinaryOperator reducer) {
4414 return ForkJoinTasks.reduceEntriesToDouble
4415 (this, transformer, basis, reducer).invoke();
4416 }
4417
4418 /**
4419 * Returns the result of accumulating the given transformation
4420 * of all entries using the given reducer to combine values,
4421 * and the given basis as an identity value.
4422 *
4423 * @param transformer a function returning the transformation
4424 * for an element
4425 * @param basis the identity (initial default value) for the reduction
4426 * @param reducer a commutative associative combining function
4427 * @return the result of accumulating the given transformation
4428 * of all entries
4429 */
4430 public long reduceEntriesToLongInParallel
4431 (LongFunction<Map.Entry<K,V>> transformer,
4432 long basis,
4433 LongBinaryOperator reducer) {
4434 return ForkJoinTasks.reduceEntriesToLong
4435 (this, transformer, basis, reducer).invoke();
4436 }
4437
4438 /**
4439 * Returns the result of accumulating the given transformation
4440 * of all entries using the given reducer to combine values,
4441 * and the given basis as an identity value.
4442 *
4443 * @param transformer a function returning the transformation
4444 * for an element
4445 * @param basis the identity (initial default value) for the reduction
4446 * @param reducer a commutative associative combining function
4447 * @return the result of accumulating the given transformation
4448 * of all entries
4449 */
4450 public int reduceEntriesToIntInParallel
4451 (IntFunction<Map.Entry<K,V>> transformer,
4452 int basis,
4453 IntBinaryOperator reducer) {
4454 return ForkJoinTasks.reduceEntriesToInt
4455 (this, transformer, basis, reducer).invoke();
4456 }
4457
4458
4459 /* ----------------Views -------------- */
4460
4461 /**
4462 * Base class for views.
4463 */
4464 static abstract class CHMView<K, V> {
4465 final ConcurrentHashMap<K, V> map;
4466 CHMView(ConcurrentHashMap<K, V> map) { this.map = map; }
4467
4468 /**
4469 * Returns the map backing this view.
4470 *
4471 * @return the map backing this view
4472 */
4473 public ConcurrentHashMap<K,V> getMap() { return map; }
4474
4475 public final int size() { return map.size(); }
4476 public final boolean isEmpty() { return map.isEmpty(); }
4477 public final void clear() { map.clear(); }
4478
4479 // implementations below rely on concrete classes supplying these
4480 abstract public Iterator<?> iterator();
4481 abstract public boolean contains(Object o);
4482 abstract public boolean remove(Object o);
4483
4484 private static final String oomeMsg = "Required array size too large";
4485
4486 public final Object[] toArray() {
4487 long sz = map.mappingCount();
4488 if (sz > (long)(MAX_ARRAY_SIZE))
4489 throw new OutOfMemoryError(oomeMsg);
4490 int n = (int)sz;
4491 Object[] r = new Object[n];
4492 int i = 0;
4493 Iterator<?> it = iterator();
4494 while (it.hasNext()) {
4495 if (i == n) {
4496 if (n >= MAX_ARRAY_SIZE)
4497 throw new OutOfMemoryError(oomeMsg);
4498 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4499 n = MAX_ARRAY_SIZE;
4500 else
4501 n += (n >>> 1) + 1;
4502 r = Arrays.copyOf(r, n);
4503 }
4504 r[i++] = it.next();
4505 }
4506 return (i == n) ? r : Arrays.copyOf(r, i);
4507 }
4508
4509 @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
4510 long sz = map.mappingCount();
4511 if (sz > (long)(MAX_ARRAY_SIZE))
4512 throw new OutOfMemoryError(oomeMsg);
4513 int m = (int)sz;
4514 T[] r = (a.length >= m) ? a :
4515 (T[])java.lang.reflect.Array
4516 .newInstance(a.getClass().getComponentType(), m);
4517 int n = r.length;
4518 int i = 0;
4519 Iterator<?> it = iterator();
4520 while (it.hasNext()) {
4521 if (i == n) {
4522 if (n >= MAX_ARRAY_SIZE)
4523 throw new OutOfMemoryError(oomeMsg);
4524 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4525 n = MAX_ARRAY_SIZE;
4526 else
4527 n += (n >>> 1) + 1;
4528 r = Arrays.copyOf(r, n);
4529 }
4530 r[i++] = (T)it.next();
4531 }
4532 if (a == r && i < n) {
4533 r[i] = null; // null-terminate
4534 return r;
4535 }
4536 return (i == n) ? r : Arrays.copyOf(r, i);
4537 }
4538
4539 public final int hashCode() {
4540 int h = 0;
4541 for (Iterator<?> it = iterator(); it.hasNext();)
4542 h += it.next().hashCode();
4543 return h;
4544 }
4545
4546 public final String toString() {
4547 StringBuilder sb = new StringBuilder();
4548 sb.append('[');
4549 Iterator<?> it = iterator();
4550 if (it.hasNext()) {
4551 for (;;) {
4552 Object e = it.next();
4553 sb.append(e == this ? "(this Collection)" : e);
4554 if (!it.hasNext())
4555 break;
4556 sb.append(',').append(' ');
4557 }
4558 }
4559 return sb.append(']').toString();
4560 }
4561
4562 public final boolean containsAll(Collection<?> c) {
4563 if (c != this) {
4564 for (Iterator<?> it = c.iterator(); it.hasNext();) {
4565 Object e = it.next();
4566 if (e == null || !contains(e))
4567 return false;
4568 }
4569 }
4570 return true;
4571 }
4572
4573 public final boolean removeAll(Collection<?> c) {
4574 boolean modified = false;
4575 for (Iterator<?> it = iterator(); it.hasNext();) {
4576 if (c.contains(it.next())) {
4577 it.remove();
4578 modified = true;
4579 }
4580 }
4581 return modified;
4582 }
4583
4584 public final boolean retainAll(Collection<?> c) {
4585 boolean modified = false;
4586 for (Iterator<?> it = iterator(); it.hasNext();) {
4587 if (!c.contains(it.next())) {
4588 it.remove();
4589 modified = true;
4590 }
4591 }
4592 return modified;
4593 }
4594
4595 }
4596
4597 /**
4598 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4599 * which additions may optionally be enabled by mapping to a
4600 * common value. This class cannot be directly instantiated. See
4601 * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
4602 * {@link #newKeySet(int)}.
4603 */
4604 public static class KeySetView<K,V> extends CHMView<K,V>
4605 implements Set<K>, java.io.Serializable {
4606 private static final long serialVersionUID = 7249069246763182397L;
4607 private final V value;
4608 KeySetView(ConcurrentHashMap<K, V> map, V value) { // non-public
4609 super(map);
4610 this.value = value;
4611 }
4612
4613 /**
4614 * Returns the default mapped value for additions,
4615 * or {@code null} if additions are not supported.
4616 *
4617 * @return the default mapped value for additions, or {@code null}
4618 * if not supported.
4619 */
4620 public V getMappedValue() { return value; }
4621
4622 // implement Set API
4623
4624 public boolean contains(Object o) { return map.containsKey(o); }
4625 public boolean remove(Object o) { return map.remove(o) != null; }
4626
4627 /**
4628 * Returns a "weakly consistent" iterator that will never
4629 * throw {@link ConcurrentModificationException}, and
4630 * guarantees to traverse elements as they existed upon
4631 * construction of the iterator, and may (but is not
4632 * guaranteed to) reflect any modifications subsequent to
4633 * construction.
4634 *
4635 * @return an iterator over the keys of this map
4636 */
4637 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4638 public boolean add(K e) {
4639 V v;
4640 if ((v = value) == null)
4641 throw new UnsupportedOperationException();
4642 if (e == null)
4643 throw new NullPointerException();
4644 return map.internalPut(e, v, true) == null;
4645 }
4646 public boolean addAll(Collection<? extends K> c) {
4647 boolean added = false;
4648 V v;
4649 if ((v = value) == null)
4650 throw new UnsupportedOperationException();
4651 for (K e : c) {
4652 if (e == null)
4653 throw new NullPointerException();
4654 if (map.internalPut(e, v, true) == null)
4655 added = true;
4656 }
4657 return added;
4658 }
4659 public boolean equals(Object o) {
4660 Set<?> c;
4661 return ((o instanceof Set) &&
4662 ((c = (Set<?>)o) == this ||
4663 (containsAll(c) && c.containsAll(this))));
4664 }
4665
4666 public Stream<K> stream() {
4667 return Streams.stream(() -> new KeyIterator<K,V>(map), 0);
4668 }
4669 public Stream<K> parallelStream() {
4670 return Streams.parallelStream(() -> new KeyIterator<K,V>(map, null),
4671 0);
4672 }
4673 }
4674
4675 /**
4676 * A view of a ConcurrentHashMap as a {@link Collection} of
4677 * values, in which additions are disabled. This class cannot be
     * directly instantiated. See {@link #values()}.
4679 *
4680 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4681 * that will never throw {@link ConcurrentModificationException},
4682 * and guarantees to traverse elements as they existed upon
4683 * construction of the iterator, and may (but is not guaranteed to)
4684 * reflect any modifications subsequent to construction.
4685 */
4686 public static final class ValuesView<K,V> extends CHMView<K,V>
4687 implements Collection<V> {
4688 ValuesView(ConcurrentHashMap<K, V> map) { super(map); }
4689 public final boolean contains(Object o) { return map.containsValue(o); }
4690 public final boolean remove(Object o) {
4691 if (o != null) {
4692 Iterator<V> it = new ValueIterator<K,V>(map);
4693 while (it.hasNext()) {
4694 if (o.equals(it.next())) {
4695 it.remove();
4696 return true;
4697 }
4698 }
4699 }
4700 return false;
4701 }
4702
4703 /**
4704 * Returns a "weakly consistent" iterator that will never
4705 * throw {@link ConcurrentModificationException}, and
4706 * guarantees to traverse elements as they existed upon
4707 * construction of the iterator, and may (but is not
4708 * guaranteed to) reflect any modifications subsequent to
4709 * construction.
4710 *
4711 * @return an iterator over the values of this map
4712 */
4713 public final Iterator<V> iterator() {
4714 return new ValueIterator<K,V>(map);
4715 }
4716 public final boolean add(V e) {
4717 throw new UnsupportedOperationException();
4718 }
4719 public final boolean addAll(Collection<? extends V> c) {
4720 throw new UnsupportedOperationException();
4721 }
4722
4723 public Stream<V> stream() {
4724 return Streams.stream(() -> new ValueIterator<K,V>(map), 0);
4725 }
4726
4727 public Stream<V> parallelStream() {
4728 return Streams.parallelStream(() -> new ValueIterator<K,V>(map, null),
4729 0);
4730 }
4731
4732 }
4733
4734 /**
4735 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4736 * entries. This class cannot be directly instantiated. See
4737 * {@link #entrySet}.
4738 */
4739 public static final class EntrySetView<K,V> extends CHMView<K,V>
4740 implements Set<Map.Entry<K,V>> {
4741 EntrySetView(ConcurrentHashMap<K, V> map) { super(map); }
4742 public final boolean contains(Object o) {
4743 Object k, v, r; Map.Entry<?,?> e;
4744 return ((o instanceof Map.Entry) &&
4745 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4746 (r = map.get(k)) != null &&
4747 (v = e.getValue()) != null &&
4748 (v == r || v.equals(r)));
4749 }
4750 public final boolean remove(Object o) {
4751 Object k, v; Map.Entry<?,?> e;
4752 return ((o instanceof Map.Entry) &&
4753 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4754 (v = e.getValue()) != null &&
4755 map.remove(k, v));
4756 }
4757
4758 /**
4759 * Returns a "weakly consistent" iterator that will never
4760 * throw {@link ConcurrentModificationException}, and
4761 * guarantees to traverse elements as they existed upon
4762 * construction of the iterator, and may (but is not
4763 * guaranteed to) reflect any modifications subsequent to
4764 * construction.
4765 *
4766 * @return an iterator over the entries of this map
4767 */
4768 public final Iterator<Map.Entry<K,V>> iterator() {
4769 return new EntryIterator<K,V>(map);
4770 }
4771
4772 public final boolean add(Entry<K,V> e) {
4773 K key = e.getKey();
4774 V value = e.getValue();
4775 if (key == null || value == null)
4776 throw new NullPointerException();
4777 return map.internalPut(key, value, false) == null;
4778 }
4779 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4780 boolean added = false;
4781 for (Entry<K,V> e : c) {
4782 if (add(e))
4783 added = true;
4784 }
4785 return added;
4786 }
4787 public boolean equals(Object o) {
4788 Set<?> c;
4789 return ((o instanceof Set) &&
4790 ((c = (Set<?>)o) == this ||
4791 (containsAll(c) && c.containsAll(this))));
4792 }
4793
4794 public Stream<Map.Entry<K,V>> stream() {
4795 return Streams.stream(() -> new EntryIterator<K,V>(map), 0);
4796 }
4797
4798 public Stream<Map.Entry<K,V>> parallelStream() {
4799 return Streams.parallelStream(() -> new EntryIterator<K,V>(map, null),
4800 0);
4801 }
4802 }
4803
4804 // ---------------------------------------------------------------------
4805
4806 /**
4807 * Predefined tasks for performing bulk parallel operations on
4808 * ConcurrentHashMaps. These tasks follow the forms and rules used
4809 * for bulk operations. Each method has the same name, but returns
4810 * a task rather than invoking it. These methods may be useful in
4811 * custom applications such as submitting a task without waiting
4812 * for completion, using a custom pool, or combining with other
4813 * tasks.
4814 */
    public static class ForkJoinTasks {
        private ForkJoinTasks() {}

        /**
         * Returns a task that when invoked, performs the given
         * action for each (key, value).
         *
         * @param map the map
         * @param action the action
         * @return the task
         */
        public static <K,V> ForkJoinTask<Void> forEach
            (ConcurrentHashMap<K,V> map,
             BiBlock<? super K, ? super V> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachMappingTask<K,V>(map, null, -1, action);
        }

        /**
         * Returns a task that when invoked, performs the given
         * action for each non-null transformation of each (key, value)
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<Void> forEach
            (ConcurrentHashMap<K,V> map,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             Block<? super U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedMappingTask<K,V,U>
                (map, null, -1, transformer, action);
        }

        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each (key,
         * value), or null if none. Upon success, further element
         * processing is suppressed and the results of any other
         * parallel invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> search
            (ConcurrentHashMap<K,V> map,
             BiFunction<? super K, ? super V, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            return new SearchMappingsTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined).
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> reduce
            (ConcurrentHashMap<K,V> map,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Double> reduceToDouble
            (ConcurrentHashMap<K,V> map,
             DoubleBiFunction<? super K, ? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Long> reduceToLong
            (ConcurrentHashMap<K,V> map,
             LongBiFunction<? super K, ? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all (key, value) pairs
         * using the given reducer to combine values, and the given
         * basis as an identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Integer> reduceToInt
            (ConcurrentHashMap<K,V> map,
             IntBiFunction<? super K, ? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceMappingsToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each key.
         *
         * @param map the map
         * @param action the action
         * @return the task
         */
        public static <K,V> ForkJoinTask<Void> forEachKey
            (ConcurrentHashMap<K,V> map,
             Block<? super K> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachKeyTask<K,V>(map, null, -1, action);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each key.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<Void> forEachKey
            (ConcurrentHashMap<K,V> map,
             Function<? super K, ? extends U> transformer,
             Block<? super U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedKeyTask<K,V,U>
                (map, null, -1, transformer, action);
        }

        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each key, or
         * null if none.  Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> searchKeys
            (ConcurrentHashMap<K,V> map,
             Function<? super K, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            return new SearchKeysTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all keys using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<K> reduceKeys
            (ConcurrentHashMap<K,V> map,
             BiFunction<? super K, ? super K, ? extends K> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceKeysTask<K,V>
                (map, null, -1, null, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined).
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> reduceKeys
            (ConcurrentHashMap<K,V> map,
             Function<? super K, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
            (ConcurrentHashMap<K,V> map,
             DoubleFunction<? super K> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Long> reduceKeysToLong
            (ConcurrentHashMap<K,V> map,
             LongFunction<? super K> transformer,
             long basis,
             LongBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all keys using the given
         * reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
            (ConcurrentHashMap<K,V> map,
             IntFunction<? super K> transformer,
             int basis,
             IntBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceKeysToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each value.
         *
         * @param map the map
         * @param action the action
         * @return the task
         */
        public static <K,V> ForkJoinTask<Void> forEachValue
            (ConcurrentHashMap<K,V> map,
             Block<? super V> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachValueTask<K,V>(map, null, -1, action);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<Void> forEachValue
            (ConcurrentHashMap<K,V> map,
             Function<? super V, ? extends U> transformer,
             Block<? super U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedValueTask<K,V,U>
                (map, null, -1, transformer, action);
        }

        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each value, or
         * null if none.  Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> searchValues
            (ConcurrentHashMap<K,V> map,
             Function<? super V, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            return new SearchValuesTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all values using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<V> reduceValues
            (ConcurrentHashMap<K,V> map,
             BiFunction<? super V, ? super V, ? extends V> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceValuesTask<K,V>
                (map, null, -1, null, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined).
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> reduceValues
            (ConcurrentHashMap<K,V> map,
             Function<? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
            (ConcurrentHashMap<K,V> map,
             DoubleFunction<? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Long> reduceValuesToLong
            (ConcurrentHashMap<K,V> map,
             LongFunction<? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all values using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
            (ConcurrentHashMap<K,V> map,
             IntFunction<? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceValuesToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each entry.
         *
         * @param map the map
         * @param action the action
         * @return the task
         */
        public static <K,V> ForkJoinTask<Void> forEachEntry
            (ConcurrentHashMap<K,V> map,
             Block<? super Map.Entry<K,V>> action) {
            if (action == null) throw new NullPointerException();
            return new ForEachEntryTask<K,V>(map, null, -1, action);
        }

        /**
         * Returns a task that when invoked, performs the given action
         * for each non-null transformation of each entry.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case the action is not applied)
         * @param action the action
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<Void> forEachEntry
            (ConcurrentHashMap<K,V> map,
             Function<Map.Entry<K,V>, ? extends U> transformer,
             Block<? super U> action) {
            if (transformer == null || action == null)
                throw new NullPointerException();
            return new ForEachTransformedEntryTask<K,V,U>
                (map, null, -1, transformer, action);
        }

        /**
         * Returns a task that when invoked, returns a non-null result
         * from applying the given search function on each entry, or
         * null if none.  Upon success, further element processing is
         * suppressed and the results of any other parallel
         * invocations of the search function are ignored.
         *
         * @param map the map
         * @param searchFunction a function returning a non-null
         * result on success, else null
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> searchEntries
            (ConcurrentHashMap<K,V> map,
             Function<Map.Entry<K,V>, ? extends U> searchFunction) {
            if (searchFunction == null) throw new NullPointerException();
            return new SearchEntriesTask<K,V,U>
                (map, null, -1, searchFunction,
                 new AtomicReference<U>());
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating all entries using the given reducer to combine
         * values, or null if none.
         *
         * @param map the map
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
            (ConcurrentHashMap<K,V> map,
             BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
            if (reducer == null) throw new NullPointerException();
            return new ReduceEntriesTask<K,V>
                (map, null, -1, null, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all entries using the
         * given reducer to combine values, or null if none.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element, or null if there is no transformation (in
         * which case it is not combined).
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V,U> ForkJoinTask<U> reduceEntries
            (ConcurrentHashMap<K,V> map,
             Function<Map.Entry<K,V>, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceEntriesTask<K,V,U>
                (map, null, -1, null, transformer, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all entries using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
            (ConcurrentHashMap<K,V> map,
             DoubleFunction<Map.Entry<K,V>> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceEntriesToDoubleTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all entries using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
            (ConcurrentHashMap<K,V> map,
             LongFunction<Map.Entry<K,V>> transformer,
             long basis,
             LongBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceEntriesToLongTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }

        /**
         * Returns a task that when invoked, returns the result of
         * accumulating the given transformation of all entries using the
         * given reducer to combine values, and the given basis as an
         * identity value.
         *
         * @param map the map
         * @param transformer a function returning the transformation
         * for an element
         * @param basis the identity (initial default value) for the reduction
         * @param reducer a commutative associative combining function
         * @return the task
         */
        public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
            (ConcurrentHashMap<K,V> map,
             IntFunction<Map.Entry<K,V>> transformer,
             int basis,
             IntBinaryOperator reducer) {
            if (transformer == null || reducer == null)
                throw new NullPointerException();
            return new MapReduceEntriesToIntTask<K,V>
                (map, null, -1, null, transformer, basis, reducer);
        }
    }
5468
5469 // -------------------------------------------------------
5470
5471 /*
5472 * Task classes. Coded in a regular but ugly format/style to
5473 * simplify checks that each variant differs in the right way from
5474 * others. The null screenings exist because compilers cannot tell
5475 * that we've already null-checked task arguments, so we force
5476 * simplest hoisted bypass to help avoid convoluted traps.
5477 */
5478
    @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
        extends Traverser<K,V,Void> {
        final Block<? super K> action;   // applied to each key in this task's range
        ForEachKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Block<? super K> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Block<? super K> action;
            // Hoisted null screening; see the task-class comment above.
            if ((action = this.action) != null) {
                // Fork a subtask for as long as preSplit reports a split batch.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachKeyTask<K,V>(map, this, b, action).fork();
                // Apply the action to every key in the remaining portion.
                while (advance() != null)
                    action.accept((K)nextKey);
                propagateCompletion();
            }
        }
    }
5499
    @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
        extends Traverser<K,V,Void> {
        final Block<? super V> action;   // applied to each value in this task's range
        ForEachValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Block<? super V> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Block<? super V> action;
            // Hoisted null screening; see the task-class comment above.
            if ((action = this.action) != null) {
                // Fork a subtask for as long as preSplit reports a split batch.
                for (int b; (b = preSplit()) > 0;)
                    new ForEachValueTask<K,V>(map, this, b, action).fork();
                V v;
                // advance() returns each value until traversal is exhausted.
                while ((v = advance()) != null)
                    action.accept(v);
                propagateCompletion();
            }
        }
    }
5521
    /**
     * Task applying the given action to every entry in this task's
     * range; entries are materialized on the fly via entryFor.
     */
    @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
        extends Traverser<K,V,Void> {
        final Block<? super Entry<K,V>> action;
        ForEachEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Block<? super Entry<K,V>> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Block<? super Entry<K,V>> action;
            // Null screen: hoist field into local (see task-class comment).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachEntryTask<K,V>(map, this, b, action).fork();
                V v;
                while ((v = advance()) != null)
                    action.accept(entryFor((K)nextKey, v));
                propagateCompletion();
            }
        }
    }
5543
    /**
     * Task applying the given (key, value) action to every mapping in
     * this task's range.
     */
    @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
        extends Traverser<K,V,Void> {
        final BiBlock<? super K, ? super V> action;
        ForEachMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiBlock<? super K,? super V> action) {
            super(m, p, b);
            this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiBlock<? super K, ? super V> action;
            // Null screen: hoist field into local (see task-class comment).
            if ((action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachMappingTask<K,V>(map, this, b, action).fork();
                V v;
                while ((v = advance()) != null)
                    action.accept((K)nextKey, v);
                propagateCompletion();
            }
        }
    }
5565
    /**
     * Task applying the action to each non-null transformation of a
     * key in this task's range; null transformer results are skipped.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<? super K, ? extends U> transformer;
        final Block<? super U> action;
        ForEachTransformedKeyTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super K, ? extends U> transformer, Block<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super K, ? extends U> transformer;
            final Block<? super U> action;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedKeyTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                U u;
                while (advance() != null) {
                    // A null transformation means "no mapping": skip it.
                    if ((u = transformer.apply((K)nextKey)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5593
    /**
     * Task applying the action to each non-null transformation of a
     * value in this task's range; null transformer results are skipped.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<? super V, ? extends U> transformer;
        final Block<? super U> action;
        ForEachTransformedValueTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super V, ? extends U> transformer, Block<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super V, ? extends U> transformer;
            final Block<? super U> action;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedValueTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advance()) != null) {
                    // A null transformation means "no mapping": skip it.
                    if ((u = transformer.apply(v)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5621
    /**
     * Task applying the action to each non-null transformation of an
     * entry in this task's range; null transformer results are skipped.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
        extends Traverser<K,V,Void> {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final Block<? super U> action;
        ForEachTransformedEntryTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<Map.Entry<K,V>, ? extends U> transformer, Block<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<Map.Entry<K,V>, ? extends U> transformer;
            final Block<? super U> action;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedEntryTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advance()) != null) {
                    // A null transformation means "no mapping": skip it.
                    if ((u = transformer.apply(entryFor((K)nextKey,
                                                        v))) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5650
    /**
     * Task applying the action to each non-null transformation of a
     * (key, value) pair in this task's range; null transformer results
     * are skipped.
     */
    @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
        extends Traverser<K,V,Void> {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final Block<? super U> action;
        ForEachTransformedMappingTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             Block<? super U> action) {
            super(m, p, b);
            this.transformer = transformer; this.action = action;
        }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> transformer;
            final Block<? super U> action;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (action = this.action) != null) {
                for (int b; (b = preSplit()) > 0;)
                    new ForEachTransformedMappingTask<K,V,U>
                        (map, this, b, transformer, action).fork();
                V v; U u;
                while ((v = advance()) != null) {
                    // A null transformation means "no mapping": skip it.
                    if ((u = transformer.apply((K)nextKey, v)) != null)
                        action.accept(u);
                }
                propagateCompletion();
            }
        }
    }
5679
    /**
     * Task searching keys for a non-null result of searchFunction.
     * The shared AtomicReference holds the first result found and
     * doubles as a cross-task termination signal: all tasks poll it
     * and stop early once it is set.
     */
    @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super K, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super K, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super K, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Fork subtasks, but bail out if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchKeysTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                while (result.get() == null) {
                    U u;
                    if (advance() == null) {
                        // Range exhausted without a match.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply((K)nextKey)) != null) {
                        // First match wins; completing the root cancels
                        // the remaining tasks' work.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5720
    /**
     * Task searching values for a non-null result of searchFunction.
     * The shared AtomicReference holds the first result found and
     * doubles as a cross-task termination signal.
     */
    @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Fork subtasks, but bail out if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchValuesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Range exhausted without a match.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(v)) != null) {
                        // First match wins; completing the root cancels
                        // the remaining tasks' work.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5761
    /**
     * Task searching entries for a non-null result of searchFunction.
     * The shared AtomicReference holds the first result found and
     * doubles as a cross-task termination signal.
     */
    @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<Entry<K,V>, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             Function<Entry<K,V>, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<Entry<K,V>, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Fork subtasks, but bail out if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchEntriesTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Range exhausted without a match.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply(entryFor((K)nextKey,
                                                           v))) != null) {
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        // Unlike the sibling search tasks this exits via
                        // return rather than break; equivalent here, since
                        // nothing follows the loop.
                        return;
                    }
                }
            }
        }
    }
5803
    /**
     * Task searching (key, value) pairs for a non-null result of
     * searchFunction. The shared AtomicReference holds the first
     * result found and doubles as a cross-task termination signal.
     */
    @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFunction<? super K, ? super V, ? extends U> searchFunction;
        final AtomicReference<U> result;
        SearchMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             BiFunction<? super K, ? super V, ? extends U> searchFunction,
             AtomicReference<U> result) {
            super(m, p, b);
            this.searchFunction = searchFunction; this.result = result;
        }
        public final U getRawResult() { return result.get(); }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> searchFunction;
            final AtomicReference<U> result;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((searchFunction = this.searchFunction) != null &&
                (result = this.result) != null) {
                // Fork subtasks, but bail out if a result already exists.
                for (int b;;) {
                    if (result.get() != null)
                        return;
                    if ((b = preSplit()) <= 0)
                        break;
                    new SearchMappingsTask<K,V,U>
                        (map, this, b, searchFunction, result).fork();
                }
                while (result.get() == null) {
                    V v; U u;
                    if ((v = advance()) == null) {
                        // Range exhausted without a match.
                        propagateCompletion();
                        break;
                    }
                    if ((u = searchFunction.apply((K)nextKey, v)) != null) {
                        // First match wins; completing the root cancels
                        // the remaining tasks' work.
                        if (result.compareAndSet(null, u))
                            quietlyCompleteRoot();
                        break;
                    }
                }
            }
        }
    }
5844
    /**
     * Task reducing all keys in this task's range with the given
     * reducer. Forked right-hand subtasks are threaded through
     * rights/nextRight and their results merged in the completion loop.
     */
    @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
        extends Traverser<K,V,K> {
        final BiFunction<? super K, ? super K, ? extends K> reducer;
        K result;
        ReduceKeysTask<K,V> rights, nextRight;
        ReduceKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             ReduceKeysTask<K,V> nextRight,
             BiFunction<? super K, ? super K, ? extends K> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.reducer = reducer;
        }
        public final K getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super K, ? super K, ? extends K> reducer;
            // Null screen: hoist field into local (see task-class comment).
            if ((reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new ReduceKeysTask<K,V>
                     (map, this, b, rights, reducer)).fork();
                K r = null;
                while (advance() != null) {
                    K u = (K)nextKey;
                    // NOTE(review): the u == null arm looks unreachable
                    // (map keys are presumably never null) — confirm
                    // against Traverser before simplifying.
                    r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
                }
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    ReduceKeysTask<K,V>
                        t = (ReduceKeysTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        K tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
5886
5887 @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
5888 extends Traverser<K,V,V> {
5889 final BiFunction<? super V, ? super V, ? extends V> reducer;
5890 V result;
5891 ReduceValuesTask<K,V> rights, nextRight;
5892 ReduceValuesTask
5893 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5894 ReduceValuesTask<K,V> nextRight,
5895 BiFunction<? super V, ? super V, ? extends V> reducer) {
5896 super(m, p, b); this.nextRight = nextRight;
5897 this.reducer = reducer;
5898 }
5899 public final V getRawResult() { return result; }
5900 @SuppressWarnings("unchecked") public final void compute() {
5901 final BiFunction<? super V, ? super V, ? extends V> reducer;
5902 if ((reducer = this.reducer) != null) {
5903 for (int b; (b = preSplit()) > 0;)
5904 (rights = new ReduceValuesTask<K,V>
5905 (map, this, b, rights, reducer)).fork();
5906 V r = null, v;
5907 while ((v = advance()) != null)
5908 r = (r == null) ? v : v == null ? r : reducer.apply(r, v);
5909 result = r;
5910 CountedCompleter<?> c;
5911 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5912 ReduceValuesTask<K,V>
5913 t = (ReduceValuesTask<K,V>)c,
5914 s = t.rights;
5915 while (s != null) {
5916 V tr, sr;
5917 if ((sr = s.result) != null)
5918 t.result = (((tr = t.result) == null) ? sr :
5919 reducer.apply(tr, sr));
5920 s = t.rights = s.nextRight;
5921 }
5922 }
5923 }
5924 }
5925 }
5926
5927 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
5928 extends Traverser<K,V,Map.Entry<K,V>> {
5929 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5930 Map.Entry<K,V> result;
5931 ReduceEntriesTask<K,V> rights, nextRight;
5932 ReduceEntriesTask
5933 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5934 ReduceEntriesTask<K,V> nextRight,
5935 BiFunction<Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5936 super(m, p, b); this.nextRight = nextRight;
5937 this.reducer = reducer;
5938 }
5939 public final Map.Entry<K,V> getRawResult() { return result; }
5940 @SuppressWarnings("unchecked") public final void compute() {
5941 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5942 if ((reducer = this.reducer) != null) {
5943 for (int b; (b = preSplit()) > 0;)
5944 (rights = new ReduceEntriesTask<K,V>
5945 (map, this, b, rights, reducer)).fork();
5946 Map.Entry<K,V> r = null;
5947 V v;
5948 while ((v = advance()) != null) {
5949 Map.Entry<K,V> u = entryFor((K)nextKey, v);
5950 r = (r == null) ? u : reducer.apply(r, u);
5951 }
5952 result = r;
5953 CountedCompleter<?> c;
5954 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5955 ReduceEntriesTask<K,V>
5956 t = (ReduceEntriesTask<K,V>)c,
5957 s = t.rights;
5958 while (s != null) {
5959 Map.Entry<K,V> tr, sr;
5960 if ((sr = s.result) != null)
5961 t.result = (((tr = t.result) == null) ? sr :
5962 reducer.apply(tr, sr));
5963 s = t.rights = s.nextRight;
5964 }
5965 }
5966 }
5967 }
5968 }
5969
    /**
     * Task mapping each key through transformer and reducing the
     * non-null results with reducer; null transformations are skipped.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super K, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceKeysTask<K,V,U> rights, nextRight;
        MapReduceKeysTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysTask<K,V,U> nextRight,
             Function<? super K, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super K, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                while (advance() != null) {
                    if ((u = transformer.apply((K)nextKey)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysTask<K,V,U>
                        t = (MapReduceKeysTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6016
    /**
     * Task mapping each value through transformer and reducing the
     * non-null results with reducer; null transformations are skipped.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceValuesTask<K,V,U> rights, nextRight;
        MapReduceValuesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesTask<K,V,U> nextRight,
             Function<? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<? super V, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesTask<K,V,U>
                        t = (MapReduceValuesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6064
    /**
     * Task mapping each entry through transformer and reducing the
     * non-null results with reducer; null transformations are skipped.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
        extends Traverser<K,V,U> {
        final Function<Map.Entry<K,V>, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceEntriesTask<K,V,U> rights, nextRight;
        MapReduceEntriesTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesTask<K,V,U> nextRight,
             Function<Map.Entry<K,V>, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final Function<Map.Entry<K,V>, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply(entryFor((K)nextKey,
                                                        v))) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesTask<K,V,U>
                        t = (MapReduceEntriesTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6113
    /**
     * Task mapping each (key, value) pair through transformer and
     * reducing the non-null results with reducer; null transformations
     * are skipped.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
        extends Traverser<K,V,U> {
        final BiFunction<? super K, ? super V, ? extends U> transformer;
        final BiFunction<? super U, ? super U, ? extends U> reducer;
        U result;
        MapReduceMappingsTask<K,V,U> rights, nextRight;
        MapReduceMappingsTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsTask<K,V,U> nextRight,
             BiFunction<? super K, ? super V, ? extends U> transformer,
             BiFunction<? super U, ? super U, ? extends U> reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.reducer = reducer;
        }
        public final U getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final BiFunction<? super K, ? super V, ? extends U> transformer;
            final BiFunction<? super U, ? super U, ? extends U> reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsTask<K,V,U>
                     (map, this, b, rights, transformer, reducer)).fork();
                U r = null, u;
                V v;
                while ((v = advance()) != null) {
                    if ((u = transformer.apply((K)nextKey, v)) != null)
                        r = (r == null) ? u : reducer.apply(r, u);
                }
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsTask<K,V,U>
                        t = (MapReduceMappingsTask<K,V,U>)c,
                        s = t.rights;
                    while (s != null) {
                        U tr, sr;
                        if ((sr = s.result) != null)
                            t.result = (((tr = t.result) == null) ? sr :
                                        reducer.apply(tr, sr));
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6161
    /**
     * Task mapping each key to a double and reducing with reducer,
     * starting from the given basis. Primitive accumulation needs no
     * per-element null screening.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final DoubleFunction<? super K> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceKeysToDoubleTask<K,V> rights, nextRight;
        MapReduceKeysToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToDoubleTask<K,V> nextRight,
             DoubleFunction<? super K> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final DoubleFunction<? super K> transformer;
            final DoubleBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                while (advance() != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToDoubleTask<K,V>
                        t = (MapReduceKeysToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6205
    /**
     * Task mapping each value to a double and reducing with reducer,
     * starting from the given basis.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final DoubleFunction<? super V> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceValuesToDoubleTask<K,V> rights, nextRight;
        MapReduceValuesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToDoubleTask<K,V> nextRight,
             DoubleFunction<? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final DoubleFunction<? super V> transformer;
            final DoubleBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToDoubleTask<K,V>
                        t = (MapReduceValuesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6250
    /**
     * Task mapping each entry to a double and reducing with reducer,
     * starting from the given basis.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final DoubleFunction<Map.Entry<K,V>> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
        MapReduceEntriesToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToDoubleTask<K,V> nextRight,
             DoubleFunction<Map.Entry<K,V>> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final DoubleFunction<Map.Entry<K,V>> transformer;
            final DoubleBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)nextKey,
                                                                                    v)));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToDoubleTask<K,V>
                        t = (MapReduceEntriesToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6296
    /**
     * Task mapping each (key, value) pair to a double and reducing
     * with reducer, starting from the given basis.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
        extends Traverser<K,V,Double> {
        final DoubleBiFunction<? super K, ? super V> transformer;
        final DoubleBinaryOperator reducer;
        final double basis;
        double result;
        MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
        MapReduceMappingsToDoubleTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToDoubleTask<K,V> nextRight,
             DoubleBiFunction<? super K, ? super V> transformer,
             double basis,
             DoubleBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Double getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final DoubleBiFunction<? super K, ? super V> transformer;
            final DoubleBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                double r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToDoubleTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey, v));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToDoubleTask<K,V>
                        t = (MapReduceMappingsToDoubleTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsDouble(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6341
    /**
     * Task mapping each key to a long and reducing with reducer,
     * starting from the given basis.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final LongFunction<? super K> transformer;
        final LongBinaryOperator reducer;
        final long basis;
        long result;
        MapReduceKeysToLongTask<K,V> rights, nextRight;
        MapReduceKeysToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToLongTask<K,V> nextRight,
             LongFunction<? super K> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final LongFunction<? super K> transformer;
            final LongBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                while (advance() != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToLongTask<K,V>
                        t = (MapReduceKeysToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6385
    /**
     * Task mapping each value to a long and reducing with reducer,
     * starting from the given basis.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final LongFunction<? super V> transformer;
        final LongBinaryOperator reducer;
        final long basis;
        long result;
        MapReduceValuesToLongTask<K,V> rights, nextRight;
        MapReduceValuesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToLongTask<K,V> nextRight,
             LongFunction<? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final LongFunction<? super V> transformer;
            final LongBinaryOperator reducer;
            // Null screen: hoist fields into locals (see task-class comment).
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(v));
                result = r;
                // Merge results from completed right-hand subtasks.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToLongTask<K,V>
                        t = (MapReduceValuesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6430
    /**
     * ForkJoin task that maps each entry to a long using {@code
     * transformer} and reduces the mapped values with {@code reducer},
     * starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final LongFunction<Map.Entry<K,V>> transformer; // per-entry mapping
        final LongBinaryOperator reducer;               // combines two mapped values
        final long basis;                               // initial accumulator value
        long result;                                    // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceEntriesToLongTask<K,V> rights, nextRight;
        MapReduceEntriesToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToLongTask<K,V> nextRight,
             LongFunction<Map.Entry<K,V>> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final LongFunction<Map.Entry<K,V>> transformer;
            final LongBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion; each key/value
                // pair is materialized as an entry via entryFor.
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)nextKey, v)));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToLongTask<K,V>
                        t = (MapReduceEntriesToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6475
    /**
     * ForkJoin task that maps each (key, value) pair to a long using
     * {@code transformer} and reduces the mapped values with {@code
     * reducer}, starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
        extends Traverser<K,V,Long> {
        final LongBiFunction<? super K, ? super V> transformer; // per-mapping function
        final LongBinaryOperator reducer;                       // combines two mapped values
        final long basis;                                       // initial accumulator value
        long result;                                            // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceMappingsToLongTask<K,V> rights, nextRight;
        MapReduceMappingsToLongTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToLongTask<K,V> nextRight,
             LongBiFunction<? super K, ? super V> transformer,
             long basis,
             LongBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Long getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final LongBiFunction<? super K, ? super V> transformer;
            final LongBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                long r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToLongTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion of the mappings.
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey, v));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToLongTask<K,V>
                        t = (MapReduceMappingsToLongTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsLong(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6520
    /**
     * ForkJoin task that maps each key to an int using {@code
     * transformer} and reduces the mapped values with {@code reducer},
     * starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final IntFunction<? super K> transformer;    // per-element mapping
        final IntBinaryOperator reducer;             // combines two mapped values
        final int basis;                             // initial accumulator value
        int result;                                  // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceKeysToIntTask<K,V> rights, nextRight;
        MapReduceKeysToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceKeysToIntTask<K,V> nextRight,
             IntFunction<? super K> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final IntFunction<? super K> transformer;
            final IntBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceKeysToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion of the keys.
                while (advance() != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceKeysToIntTask<K,V>
                        t = (MapReduceKeysToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6564
    /**
     * ForkJoin task that maps each value to an int using {@code
     * transformer} and reduces the mapped values with {@code reducer},
     * starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final IntFunction<? super V> transformer;    // per-element mapping
        final IntBinaryOperator reducer;             // combines two mapped values
        final int basis;                             // initial accumulator value
        int result;                                  // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceValuesToIntTask<K,V> rights, nextRight;
        MapReduceValuesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceValuesToIntTask<K,V> nextRight,
             IntFunction<? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final IntFunction<? super V> transformer;
            final IntBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceValuesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion of the values.
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(v));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceValuesToIntTask<K,V>
                        t = (MapReduceValuesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6609
    /**
     * ForkJoin task that maps each entry to an int using {@code
     * transformer} and reduces the mapped values with {@code reducer},
     * starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final IntFunction<Map.Entry<K,V>> transformer; // per-entry mapping
        final IntBinaryOperator reducer;               // combines two mapped values
        final int basis;                               // initial accumulator value
        int result;                                    // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceEntriesToIntTask<K,V> rights, nextRight;
        MapReduceEntriesToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceEntriesToIntTask<K,V> nextRight,
             IntFunction<Map.Entry<K,V>> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final IntFunction<Map.Entry<K,V>> transformer;
            final IntBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceEntriesToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion; each key/value
                // pair is materialized as an entry via entryFor.
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)nextKey,
                                                                              v)));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceEntriesToIntTask<K,V>
                        t = (MapReduceEntriesToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6655
    /**
     * ForkJoin task that maps each (key, value) pair to an int using
     * {@code transformer} and reduces the mapped values with {@code
     * reducer}, starting from {@code basis}.
     */
    @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
        extends Traverser<K,V,Integer> {
        final IntBiFunction<? super K, ? super V> transformer; // per-mapping function
        final IntBinaryOperator reducer;                       // combines two mapped values
        final int basis;                                       // initial accumulator value
        int result;                                            // reduction over this task's portion
        // "rights": chain of forked right-hand subtasks; "nextRight" links
        // this task into its parent's chain.
        MapReduceMappingsToIntTask<K,V> rights, nextRight;
        MapReduceMappingsToIntTask
            (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
             MapReduceMappingsToIntTask<K,V> nextRight,
             IntBiFunction<? super K, ? super V> transformer,
             int basis,
             IntBinaryOperator reducer) {
            super(m, p, b); this.nextRight = nextRight;
            this.transformer = transformer;
            this.basis = basis; this.reducer = reducer;
        }
        // Boxes the primitive result for the ForkJoinTask API.
        public final Integer getRawResult() { return result; }
        @SuppressWarnings("unchecked") public final void compute() {
            final IntBiFunction<? super K, ? super V> transformer;
            final IntBinaryOperator reducer;
            // No-op unless both operations were supplied.
            if ((transformer = this.transformer) != null &&
                (reducer = this.reducer) != null) {
                int r = this.basis;
                // While the traversal can still split, fork the right half
                // as a new subtask chained onto "rights".
                for (int b; (b = preSplit()) > 0;)
                    (rights = new MapReduceMappingsToIntTask<K,V>
                     (map, this, b, rights, transformer, r, reducer)).fork();
                // Map and reduce this task's own portion of the mappings.
                V v;
                while ((v = advance()) != null)
                    r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey, v));
                result = r;
                // On completion, fold each finished right subtask's result
                // into its parent, walking up the completer chain.
                CountedCompleter<?> c;
                for (c = firstComplete(); c != null; c = c.nextComplete()) {
                    MapReduceMappingsToIntTask<K,V>
                        t = (MapReduceMappingsToIntTask<K,V>)c,
                        s = t.rights;
                    while (s != null) {
                        t.result = reducer.applyAsInt(t.result, s.result);
                        s = t.rights = s.nextRight;
                    }
                }
            }
        }
    }
6700
    // Unsafe mechanics: raw field/array offsets resolved in the static
    // initializer below, used for volatile/CAS access to hot fields.
    private static final sun.misc.Unsafe U;
    private static final long SIZECTL;        // offset of field "sizeCtl"
    private static final long TRANSFERINDEX;  // offset of field "transferIndex"
    private static final long TRANSFERORIGIN; // offset of field "transferOrigin"
    private static final long BASECOUNT;      // offset of field "baseCount"
    private static final long CELLSBUSY;      // offset of field "cellsBusy"
    private static final long CELLVALUE;      // offset of Cell.value
    private static final long ABASE;          // base offset of Node[] storage
    private static final int ASHIFT;          // log2 of Node[] element scale
6711
    static {
        int ss;  // array element scale, checked below
        try {
            U = sun.misc.Unsafe.getUnsafe();
            Class<?> k = ConcurrentHashMap.class;
            // Resolve raw offsets of the CASed fields by reflection.
            SIZECTL = U.objectFieldOffset
                (k.getDeclaredField("sizeCtl"));
            TRANSFERINDEX = U.objectFieldOffset
                (k.getDeclaredField("transferIndex"));
            TRANSFERORIGIN = U.objectFieldOffset
                (k.getDeclaredField("transferOrigin"));
            BASECOUNT = U.objectFieldOffset
                (k.getDeclaredField("baseCount"));
            CELLSBUSY = U.objectFieldOffset
                (k.getDeclaredField("cellsBusy"));
            Class<?> ck = Cell.class;
            CELLVALUE = U.objectFieldOffset
                (ck.getDeclaredField("value"));
            Class<?> sc = Node[].class;
            ABASE = U.arrayBaseOffset(sc);
            ss = U.arrayIndexScale(sc);
            // ASHIFT = log2(scale); valid only when scale is a power of two.
            ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
        } catch (Exception e) {
            throw new Error(e);
        }
        // Performed outside the try so this Error is not wrapped above.
        if ((ss & (ss-1)) != 0)
            throw new Error("data type scale not a power of two");
    }
6740
6741 }