root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.159
Committed: Sun Jan 6 20:05:51 2013 UTC (11 years, 4 months ago) by jsr166
Branch: MAIN
Changes since 1.158: +1 -1 lines
Log Message:
javadoc style

File Contents

# Content
1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.util.concurrent.ForkJoinPool;
9 import java.util.concurrent.CountedCompleter;
10 import java.util.function.*;
11 import java.util.stream.Spliterator;
12 import java.util.stream.Stream;
13 import java.util.stream.Streams;
14
15 import java.util.Comparator;
16 import java.util.Arrays;
17 import java.util.Map;
18 import java.util.Set;
19 import java.util.Collection;
20 import java.util.AbstractMap;
21 import java.util.AbstractSet;
22 import java.util.AbstractCollection;
23 import java.util.Hashtable;
24 import java.util.HashMap;
25 import java.util.Iterator;
26 import java.util.Enumeration;
27 import java.util.ConcurrentModificationException;
28 import java.util.NoSuchElementException;
29 import java.util.concurrent.ConcurrentMap;
30 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
31 import java.util.concurrent.atomic.AtomicInteger;
32 import java.util.concurrent.atomic.AtomicReference;
33 import java.io.Serializable;
34
35 /**
36 * A hash table supporting full concurrency of retrievals and
37 * high expected concurrency for updates. This class obeys the
38 * same functional specification as {@link java.util.Hashtable}, and
39 * includes versions of methods corresponding to each method of
40 * {@code Hashtable}. However, even though all operations are
41 * thread-safe, retrieval operations do <em>not</em> entail locking,
42 * and there is <em>not</em> any support for locking the entire table
43 * in a way that prevents all access. This class is fully
44 * interoperable with {@code Hashtable} in programs that rely on its
45 * thread safety but not on its synchronization details.
46 *
47 * <p>Retrieval operations (including {@code get}) generally do not
48 * block, so may overlap with update operations (including {@code put}
49 * and {@code remove}). Retrievals reflect the results of the most
50 * recently <em>completed</em> update operations holding upon their
51 * onset. (More formally, an update operation for a given key bears a
52 * <em>happens-before</em> relation with any (non-null) retrieval for
53 * that key reporting the updated value.) For aggregate operations
54 * such as {@code putAll} and {@code clear}, concurrent retrievals may
55 * reflect insertion or removal of only some entries. Similarly,
56 * Iterators and Enumerations return elements reflecting the state of
57 * the hash table at some point at or since the creation of the
58 * iterator/enumeration. They do <em>not</em> throw {@link
59 * ConcurrentModificationException}. However, iterators are designed
60 * to be used by only one thread at a time. Bear in mind that the
61 * results of aggregate status methods including {@code size}, {@code
62 * isEmpty}, and {@code containsValue} are typically useful only when
63 * a map is not undergoing concurrent updates in other threads.
64 * Otherwise the results of these methods reflect transient states
65 * that may be adequate for monitoring or estimation purposes, but not
66 * for program control.
67 *
68 * <p>The table is dynamically expanded when there are too many
69 * collisions (i.e., keys that have distinct hash codes but fall into
70 * the same slot modulo the table size), with the expected average
71 * effect of maintaining roughly two bins per mapping (corresponding
72 * to a 0.75 load factor threshold for resizing). There may be much
73 * variance around this average as mappings are added and removed, but
74 * overall, this maintains a commonly accepted time/space tradeoff for
75 * hash tables. However, resizing this or any other kind of hash
76 * table may be a relatively slow operation. When possible, it is a
77 * good idea to provide a size estimate as an optional {@code
78 * initialCapacity} constructor argument. An additional optional
79 * {@code loadFactor} constructor argument provides a further means of
80 * customizing initial table capacity by specifying the table density
81 * to be used in calculating the amount of space to allocate for the
82 * given number of elements. Also, for compatibility with previous
83 * versions of this class, constructors may optionally specify an
84 * expected {@code concurrencyLevel} as an additional hint for
85 * internal sizing. Note that using many keys with exactly the same
86 * {@code hashCode()} is a sure way to slow down performance of any
87 * hash table.
88 *
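 * <p>As an illustrative (not normative) sketch, a map expected to hold
 * roughly a million entries can be presized so that most incremental
 * resizing is avoided (the figure here is arbitrary):
 *
 * <pre> {@code
 * ConcurrentHashMap<String,Integer> map =
 *     new ConcurrentHashMap<String,Integer>(1_000_000);}</pre>
 *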
89 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
90 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
91 * (using {@link #keySet(Object)}), when only keys are of interest, and the
92 * mapped values are (perhaps transiently) not used or all take the
93 * same mapping value.
94 *
95 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
96 * form of histogram or multiset) by using {@link
97 * java.util.concurrent.atomic.LongAdder} values and initializing via
98 * {@link #computeIfAbsent}. For example, to add a count to a {@code
99 * ConcurrentHashMap<String,LongAdder> freqs}, you can use {@code
100 * freqs.computeIfAbsent(k, x -> new LongAdder()).increment();}
101 *
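 * <p>An expanded, purely illustrative version of that idiom, in which
 * {@code word} stands for whatever key is being counted, reads a total
 * back with {@code LongAdder.sum}:
 *
 * <pre> {@code
 * ConcurrentHashMap<String,LongAdder> freqs =
 *     new ConcurrentHashMap<String,LongAdder>();
 * freqs.computeIfAbsent(word, w -> new LongAdder()).increment();
 * long count = freqs.get(word).sum();}</pre>
 *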
102 * <p>This class and its views and iterators implement all of the
103 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
104 * interfaces.
105 *
106 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
107 * does <em>not</em> allow {@code null} to be used as a key or value.
108 *
109 * <p>ConcurrentHashMaps support a set of sequential and parallel bulk
110 * operations. (Parallel forms use the {@link
111 * ForkJoinPool#commonPool()}.) Tasks that may be used in other
112 * contexts are available in class {@link ForkJoinTasks}. These
113 * operations are designed to be safely, and often sensibly, applied
114 * even with maps that are being concurrently updated by other
115 * threads; for example, when computing a snapshot summary of the
116 * values in a shared registry. There are three kinds of operation,
117 * each with four forms, accepting functions with Keys, Values,
118 * Entries, and (Key, Value) arguments and/or return values. Because
119 * the elements of a ConcurrentHashMap are not ordered in any
120 * particular way, and may be processed in different orders in
121 * different parallel executions, the correctness of supplied
122 * functions should not depend on any ordering, or on any other
123 * objects or values that may transiently change while computation is
124 * in progress; and except for forEach actions, should ideally be
125 * side-effect-free.
126 *
127 * <ul>
128 * <li> forEach: Perform a given action on each element.
129 * A variant form applies a given transformation on each element
130 * before performing the action.</li>
131 *
132 * <li> search: Return the first available non-null result of
133 * applying a given function on each element; skipping further
134 * search when a result is found.</li>
135 *
136 * <li> reduce: Accumulate each element. The supplied reduction
137 * function cannot rely on ordering (more formally, it should be
138 * both associative and commutative). There are five variants:
139 *
140 * <ul>
141 *
142 * <li> Plain reductions. (There is not a form of this method for
143 * (key, value) function arguments since there is no corresponding
144 * return type.)</li>
145 *
146 * <li> Mapped reductions that accumulate the results of a given
147 * function applied to each element.</li>
148 *
149 * <li> Reductions to scalar doubles, longs, and ints, using a
150 * given basis value.</li>
151 *
152 * </ul>
153 * </li>
154 * </ul>
155 *
156 * <p>The concurrency properties of bulk operations follow
157 * from those of ConcurrentHashMap: Any non-null result returned
158 * from {@code get(key)} and related access methods bears a
159 * happens-before relation with the associated insertion or
160 * update. The result of any bulk operation reflects the
161 * composition of these per-element relations (but is not
162 * necessarily atomic with respect to the map as a whole unless it
163 * is somehow known to be quiescent). Conversely, because keys
164 * and values in the map are never null, null serves as a reliable
165 * atomic indicator of the current lack of any result. To
166 * maintain this property, null serves as an implicit basis for
167 * all non-scalar reduction operations. For the double, long, and
168 * int versions, the basis should be one that, when combined with
169 * any other value, returns that other value (more formally, it
170 * should be the identity element for the reduction). Most common
171 * reductions have these properties; for example, computing a sum
172 * with basis 0 or a minimum with basis MAX_VALUE.
173 *
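 * <p>As a sketch only (the bulk-operation method name and signature here
 * are assumed from the scheme described above and may not match this
 * revision exactly), a sum over {@code Long} values would use basis 0,
 * the identity for addition:
 *
 * <pre> {@code
 * long total = map.reduceValuesToLong(v -> v, 0L, (a, b) -> a + b);}</pre>
 *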
174 * <p>Search and transformation functions provided as arguments
175 * should similarly return null to indicate the lack of any result
176 * (in which case it is not used). In the case of mapped
177 * reductions, this also enables transformations to serve as
178 * filters, returning null (or, in the case of primitive
179 * specializations, the identity basis) if the element should not
180 * be combined. You can create compound transformations and
181 * filterings by composing them yourself under this "null means
182 * there is nothing there now" rule before using them in search or
183 * reduce operations.
184 *
185 * <p>Methods accepting and/or returning Entry arguments maintain
186 * key-value associations. They may be useful, for example, when
187 * finding the key for the greatest value. Note that "plain" Entry
188 * arguments can be supplied using {@code new
189 * AbstractMap.SimpleEntry(k,v)}.
190 *
191 * <p>Bulk operations may complete abruptly, throwing an
192 * exception encountered in the application of a supplied
193 * function. Bear in mind when handling such exceptions that other
194 * concurrently executing functions could also have thrown
195 * exceptions, or would have done so if the first exception had
196 * not occurred.
197 *
198 * <p>Speedups for parallel compared to sequential forms are common
199 * but not guaranteed. Parallel operations involving brief functions
200 * on small maps may execute more slowly than sequential forms if the
201 * underlying work to parallelize the computation is more expensive
202 * than the computation itself. Similarly, parallelization may not
203 * lead to much actual parallelism if all processors are busy
204 * performing unrelated tasks.
205 *
206 * <p>All arguments to all task methods must be non-null.
207 *
208 * <p>This class is a member of the
209 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
210 * Java Collections Framework</a>.
211 *
212 * @since 1.5
213 * @author Doug Lea
214 * @param <K> the type of keys maintained by this map
215 * @param <V> the type of mapped values
216 */
217 public class ConcurrentHashMap<K, V>
218 implements ConcurrentMap<K, V>, Serializable {
219 private static final long serialVersionUID = 7249069246763182397L;
220
221 /*
222 * Overview:
223 *
224 * The primary design goal of this hash table is to maintain
225 * concurrent readability (typically method get(), but also
226 * iterators and related methods) while minimizing update
227 * contention. Secondary goals are to keep space consumption about
228 * the same or better than java.util.HashMap, and to support high
229 * initial insertion rates on an empty table by many threads.
230 *
231 * Each key-value mapping is held in a Node. Because Node key
232 * fields can contain special values, they are defined using plain
233 * Object types (not type "K"). This leads to a lot of explicit
234 * casting (and many explicit warning suppressions to tell
235 * compilers not to complain about it). It also allows some of the
236 * public methods to be factored into a smaller number of internal
237 * methods (although sadly not so for the five variants of
238 * put-related operations). The validation-based approach
239 * explained below leads to a lot of code sprawl because
240 * retry-control precludes factoring into smaller methods.
241 *
242 * The table is lazily initialized to a power-of-two size upon the
243 * first insertion. Each bin in the table normally contains a
244 * list of Nodes (most often, the list has only zero or one Node).
245 * Table accesses require volatile/atomic reads, writes, and
246 * CASes. Because there is no other way to arrange this without
247 * adding further indirections, we use intrinsics
248 * (sun.misc.Unsafe) operations. The lists of nodes within bins
249 * are always accurately traversable under volatile reads, so long
250 * as lookups check hash code and non-nullness of value before
251 * checking key equality.
252 *
253 * We use the top (sign) bit of Node hash fields for control
254 * purposes -- it is available anyway because of addressing
255 * constraints. Nodes with negative hash fields are forwarding
256 * nodes to either TreeBins or resized tables. The lower 31 bits
257 * of each normal Node's hash field contain a transformation of
258 * the key's hash code.
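 *
 * As an illustrative sketch of that encoding (using the helpers defined
 * below):
 *
 *   int h = spread(key.hashCode()); // top bit cleared via HASH_BITS
 *   int i = (tab.length - 1) & h;   // bin index under power-of-two mask
 *   // bin heads with hash == MOVED (0x80000000) are forwarding nodes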
259 *
260 * Insertion (via put or its variants) of the first node in an
261 * empty bin is performed by just CASing it to the bin. This is
262 * by far the most common case for put operations under most
263 * key/hash distributions. Other update operations (insert,
264 * delete, and replace) require locks. We do not want to waste
265 * the space required to associate a distinct lock object with
266 * each bin, so instead use the first node of a bin list itself as
267 * a lock. Locking support for these locks relies on builtin
268 * "synchronized" monitors.
269 *
270 * Using the first node of a list as a lock does not by itself
271 * suffice though: When a node is locked, any update must first
272 * validate that it is still the first node after locking it, and
273 * retry if not. Because new nodes are always appended to lists,
274 * once a node is first in a bin, it remains first until deleted
275 * or the bin becomes invalidated (upon resizing). However,
276 * operations that only conditionally update may inspect nodes
277 * until the point of update. This is a converse of sorts to the
278 * lazy locking technique described by Herlihy & Shavit.
279 *
280 * The main disadvantage of per-bin locks is that other update
281 * operations on other nodes in a bin list protected by the same
282 * lock can stall, for example when user equals() or mapping
283 * functions take a long time. However, statistically, under
284 * random hash codes, this is not a common problem. Ideally, the
285 * frequency of nodes in bins follows a Poisson distribution
286 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
287 * parameter of about 0.5 on average, given the resizing threshold
288 * of 0.75, although with a large variance because of resizing
289 * granularity. Ignoring variance, the expected occurrences of
290 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
291 * first values are:
292 *
293 * 0: 0.60653066
294 * 1: 0.30326533
295 * 2: 0.07581633
296 * 3: 0.01263606
297 * 4: 0.00157952
298 * 5: 0.00015795
299 * 6: 0.00001316
300 * 7: 0.00000094
301 * 8: 0.00000006
302 * more: less than 1 in ten million
303 *
304 * Lock contention probability for two threads accessing distinct
305 * elements is roughly 1 / (8 * #elements) under random hashes.
306 *
307 * Actual hash code distributions encountered in practice
308 * sometimes deviate significantly from uniform randomness. This
309 * includes the case when N > (1<<30), so some keys MUST collide.
310 * Similarly for dumb or hostile usages in which multiple keys are
311 * designed to have identical hash codes. Also, although we guard
312 * against the worst effects of this (see method spread), sets of
313 * hashes may differ only in bits that do not impact their bin
314 * index for a given power-of-two mask. So we use a secondary
315 * strategy that applies when the number of nodes in a bin exceeds
316 * a threshold, and at least one of the keys implements
317 * Comparable. These TreeBins use a balanced tree to hold nodes
318 * (a specialized form of red-black trees), bounding search time
319 * to O(log N). Each search step in a TreeBin is around twice as
320 * slow as in a regular list, but given that N cannot exceed
321 * (1<<64) (before running out of addresses) this bounds search
322 * steps, lock hold times, etc, to reasonable constants (roughly
323 * 100 nodes inspected per operation worst case) so long as keys
324 * are Comparable (which is very common -- String, Long, etc).
325 * TreeBin nodes (TreeNodes) also maintain the same "next"
326 * traversal pointers as regular nodes, so can be traversed in
327 * iterators in the same way.
328 *
329 * The table is resized when occupancy exceeds a percentage
330 * threshold (nominally, 0.75, but see below). Any thread
331 * noticing an overfull bin may assist in resizing after the
332 * initiating thread allocates and sets up the replacement
333 * array. However, rather than stalling, these other threads may
334 * proceed with insertions etc. The use of TreeBins shields us
335 * from the worst case effects of overfilling while resizes are in
336 * progress. Resizing proceeds by transferring bins, one by one,
337 * from the table to the next table. To enable concurrency, the
338 * next table must be (incrementally) prefilled with place-holders
339 * serving as reverse forwarders to the old table. Because we are
340 * using power-of-two expansion, the elements from each bin must
341 * either stay at the same index, or move with a power of two
342 * offset. We eliminate unnecessary node creation by catching
343 * cases where old nodes can be reused because their next fields
344 * won't change. On average, only about one-sixth of them need
345 * cloning when a table doubles. The nodes they replace will be
346 * garbage collectable as soon as they are no longer referenced by
347 * any reader thread that may be in the midst of concurrently
348 * traversing the table. Upon transfer, the old table bin contains
349 * only a special forwarding node (with hash field "MOVED") that
350 * contains the next table as its key. On encountering a
351 * forwarding node, access and update operations restart, using
352 * the new table.
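 *
 * (In the code below this appears as the "hash < 0" checks; for example,
 * internalGet restarts on the new table via "tab = (Node<V>[])ek" when a
 * bin head turns out to be a forwarding node rather than a TreeBin.)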
353 *
354 * Each bin transfer requires its bin lock, which can stall
355 * waiting for locks while resizing. However, because other
356 * threads can join in and help resize rather than contend for
357 * locks, average aggregate waits become shorter as resizing
358 * progresses. The transfer operation must also ensure that all
359 * accessible bins in both the old and new table are usable by any
360 * traversal. This is arranged by proceeding from the last bin
361 * (table.length - 1) up towards the first. Upon seeing a
362 * forwarding node, traversals (see class Traverser) arrange to
363 * move to the new table without revisiting nodes. However, to
364 * ensure that no intervening nodes are skipped, bin splitting can
365 * only begin after the associated reverse-forwarders are in
366 * place.
367 *
368 * The traversal scheme also applies to partial traversals of
369 * ranges of bins (via an alternate Traverser constructor)
370 * to support partitioned aggregate operations. Also, read-only
371 * operations give up if ever forwarded to a null table, which
372 * provides support for shutdown-style clearing, which is also not
373 * currently implemented.
374 *
375 * Lazy table initialization minimizes footprint until first use,
376 * and also avoids resizings when the first operation is from a
377 * putAll, constructor with map argument, or deserialization.
378 * These cases attempt to override the initial capacity settings,
379 * but harmlessly fail to take effect in cases of races.
380 *
381 * The element count is maintained using a specialization of
382 * LongAdder. We need to incorporate a specialization rather than
383 * just use a LongAdder in order to access implicit
384 * contention-sensing that leads to creation of multiple
385 * Cells. The counter mechanics avoid contention on
386 * updates but can encounter cache thrashing if read too
387 * frequently during concurrent access. To avoid reading so often,
388 * resizing under contention is attempted only upon adding to a
389 * bin already holding two or more nodes. Under uniform hash
390 * distributions, the probability of this occurring at threshold
391 * is around 13%, meaning that only about 1 in 8 puts check
392 * threshold (and after resizing, many fewer do so). The bulk
393 * putAll operation further reduces contention by only committing
394 * count updates upon these size checks.
395 *
396 * Maintaining API and serialization compatibility with previous
397 * versions of this class introduces several oddities. Mainly: We
398 * leave untouched but unused constructor arguments referring to
399 * concurrencyLevel. We accept a loadFactor constructor argument,
400 * but apply it only to initial table capacity (which is the only
401 * time that we can guarantee to honor it.) We also declare an
402 * unused "Segment" class that is instantiated in minimal form
403 * only when serializing.
404 */
405
406 /* ---------------- Constants -------------- */
407
408 /**
409 * The largest possible table capacity. This value must be
410 * exactly 1<<30 to stay within Java array allocation and indexing
411 * bounds for power of two table sizes, and is further required
412 * because the top two bits of 32bit hash fields are used for
413 * control purposes.
414 */
415 private static final int MAXIMUM_CAPACITY = 1 << 30;
416
417 /**
418 * The default initial table capacity. Must be a power of 2
419 * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
420 */
421 private static final int DEFAULT_CAPACITY = 16;
422
423 /**
424 * The largest possible (non-power of two) array size.
425 * Needed by toArray and related methods.
426 */
427 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
428
429 /**
430 * The default concurrency level for this table. Unused but
431 * defined for compatibility with previous versions of this class.
432 */
433 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
434
435 /**
436 * The load factor for this table. Overrides of this value in
437 * constructors affect only the initial table capacity. The
438 * actual floating point value isn't normally used -- it is
439 * simpler to use expressions such as {@code n - (n >>> 2)} for
440 * the associated resizing threshold.
441 */
442 private static final float LOAD_FACTOR = 0.75f;
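// For n slots, the resize threshold n - (n >>> 2) equals 0.75 * n when n
// is a power of two; e.g., 16 - (16 >>> 2) = 12.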
443
444 /**
445 * The bin count threshold for using a tree rather than list for a
446 * bin. The value reflects the approximate break-even point for
447 * using tree-based operations.
448 */
449 private static final int TREE_THRESHOLD = 8;
450
451 /**
452 * Minimum number of rebinnings per transfer step. Ranges are
453 * subdivided to allow multiple resizer threads. This value
454 * serves as a lower bound to avoid resizers encountering
455 * excessive memory contention. The value should be at least
456 * DEFAULT_CAPACITY.
457 */
458 private static final int MIN_TRANSFER_STRIDE = 16;
459
460 /*
461 * Encodings for Node hash fields. See above for explanation.
462 */
463 static final int MOVED = 0x80000000; // hash field for forwarding nodes
464 static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
465
466 /** Number of CPUS, to place bounds on some sizings */
467 static final int NCPU = Runtime.getRuntime().availableProcessors();
468
469 /* ---------------- Counters -------------- */
470
471 // Adapted from LongAdder and Striped64.
472 // See their internal docs for explanation.
473
474 // A padded cell for distributing counts
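// The p0..p6 / q0..q6 longs pad the value field so that each Cell tends
// to occupy its own cache line, avoiding false sharing between cells
// updated by different threads (same approach as Striped64).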
475 static final class Cell {
476 volatile long p0, p1, p2, p3, p4, p5, p6;
477 volatile long value;
478 volatile long q0, q1, q2, q3, q4, q5, q6;
479 Cell(long x) { value = x; }
480 }
481
482 /**
483 * Holder for the thread-local hash code determining which
484 * Cell to use. The code is initialized via the
485 * cellHashCodeGenerator, but may be moved upon collisions.
486 */
487 static final class CellHashCode {
488 int code;
489 }
490
491 /**
492 * Generates initial value for per-thread CellHashCodes
493 */
494 static final AtomicInteger cellHashCodeGenerator = new AtomicInteger();
495
496 /**
497 * Increment for cellHashCodeGenerator. See class ThreadLocal
498 * for explanation.
499 */
500 static final int SEED_INCREMENT = 0x61c88647;
501
502 /**
503 * Per-thread counter hash codes. Shared across all instances.
504 */
505 static final ThreadLocal<CellHashCode> threadCellHashCode =
506 new ThreadLocal<CellHashCode>();
507
508 /* ---------------- Fields -------------- */
509
510 /**
511 * The array of bins. Lazily initialized upon first insertion.
512 * Size is always a power of two. Accessed directly by iterators.
513 */
514 transient volatile Node<V>[] table;
515
516 /**
517 * The next table to use; non-null only while resizing.
518 */
519 private transient volatile Node<V>[] nextTable;
520
521 /**
522 * Base counter value, used mainly when there is no contention,
523 * but also as a fallback during table initialization
524 * races. Updated via CAS.
525 */
526 private transient volatile long baseCount;
527
528 /**
529 * Table initialization and resizing control. When negative, the
530 * table is being initialized or resized: -1 for initialization,
531 * else -(1 + the number of active resizing threads). Otherwise,
532 * when table is null, holds the initial table size to use upon
533 * creation, or 0 for default. After initialization, holds the
534 * next element count value upon which to resize the table.
535 */
536 private transient volatile int sizeCtl;
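// Example values under the rules above: -1 while initTable is running;
// 16 if a pre-initialization capacity of 16 was requested; 12 (that is,
// 16 - (16 >>> 2)) once a 16-slot table has been created.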
537
538 /**
539 * The next table index (plus one) to split while resizing.
540 */
541 private transient volatile int transferIndex;
542
543 /**
544 * The least available table index to split while resizing.
545 */
546 private transient volatile int transferOrigin;
547
548 /**
549 * Spinlock (locked via CAS) used when resizing and/or creating Cells.
550 */
551 private transient volatile int cellsBusy;
552
553 /**
554 * Table of counter cells. When non-null, size is a power of 2.
555 */
556 private transient volatile Cell[] counterCells;
557
558 // views
559 private transient KeySetView<K,V> keySet;
560 private transient ValuesView<K,V> values;
561 private transient EntrySetView<K,V> entrySet;
562
563 /** For serialization compatibility. Null unless serialized; see below */
564 private Segment<K,V>[] segments;
565
566 /* ---------------- Table element access -------------- */
567
568 /*
569 * Volatile access methods are used for table elements as well as
570 * elements of in-progress next table while resizing. Uses are
571 * null checked by callers, and implicitly bounds-checked, relying
572 * on the invariants that tab arrays have non-zero size, and all
573 * indices are masked with (tab.length - 1) which is never
574 * negative and always less than length. Note that, to be correct
575 * wrt arbitrary concurrency errors by users, bounds checks must
576 * operate on local variables, which accounts for some odd-looking
577 * inline assignments below.
578 */
579
580 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
581 (Node<V>[] tab, int i) { // used by Traverser
582 return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
583 }
584
585 private static final <V> boolean casTabAt
586 (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
587 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
588 }
589
590 private static final <V> void setTabAt
591 (Node<V>[] tab, int i, Node<V> v) {
592 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
593 }
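// Callers compute i as (tab.length - 1) & hash on a local copy of tab,
// so the raw offset ((long)i << ASHIFT) + ABASE always falls inside the
// array, per the bounds discussion in the comment above.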
594
595 /* ---------------- Nodes -------------- */
596
597 /**
598 * Key-value entry. Note that this is never exported out as a
599 * user-visible Map.Entry (see MapEntry below). Nodes with a hash
600 * field of MOVED are special, and do not contain user keys or
601 * values. Otherwise, keys are never null, and null val fields
602 * indicate that a node is in the process of being deleted or
603 * created. For purposes of read-only access, a key may be read
604 * before a val, but can only be used after checking val to be
605 * non-null.
606 */
607 static class Node<V> {
608 final int hash;
609 final Object key;
610 volatile V val;
611 volatile Node<V> next;
612
613 Node(int hash, Object key, V val, Node<V> next) {
614 this.hash = hash;
615 this.key = key;
616 this.val = val;
617 this.next = next;
618 }
619 }
620
621 /* ---------------- TreeBins -------------- */
622
623 /**
624 * Nodes for use in TreeBins
625 */
626 static final class TreeNode<V> extends Node<V> {
627 TreeNode<V> parent; // red-black tree links
628 TreeNode<V> left;
629 TreeNode<V> right;
630 TreeNode<V> prev; // needed to unlink next upon deletion
631 boolean red;
632
633 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
634 super(hash, key, val, next);
635 this.parent = parent;
636 }
637 }
638
639 /**
640 * A specialized form of red-black tree for use in bins
641 * whose size exceeds a threshold.
642 *
643 * TreeBins use a special form of comparison for search and
644 * related operations (which is the main reason we cannot use
645 * existing collections such as TreeMaps). TreeBins contain
646 * Comparable elements, but may contain others, as well as
647 * elements that are Comparable but not necessarily Comparable<T>
648 * for the same T, so we cannot invoke compareTo among them. To
649 * handle this, the tree is ordered primarily by hash value, then
650 * by getClass().getName() order, and then by Comparator order
651 * among elements of the same class. On lookup at a node, if
652 * elements are not comparable or compare as 0, both left and
653 * right children may need to be searched in the case of tied hash
654 * values. (This corresponds to the full list search that would be
655 * necessary if all elements were non-Comparable and had tied
656 * hashes.) The red-black balancing code is updated from
657 * pre-jdk-collections
658 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
659 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
660 * Algorithms" (CLR).
661 *
662 * TreeBins also maintain a separate locking discipline from
663 * regular bins. Because they are forwarded via special MOVED
664 * nodes at bin heads (which can never change once established),
665 * we cannot use those nodes as locks. Instead, TreeBin
666 * extends AbstractQueuedSynchronizer to support a simple form of
667 * read-write lock. For update operations and table validation,
668 * the exclusive form of lock behaves in the same way as bin-head
669 * locks. However, lookups use shared read-lock mechanics to allow
670 * multiple readers in the absence of writers. Additionally,
671 * these lookups do not ever block: While the lock is not
672 * available, they proceed along the slow traversal path (via
673 * next-pointers) until the lock becomes available or the list is
674 * exhausted, whichever comes first. (These cases are not fast,
675 * but maximize aggregate expected throughput.) The AQS mechanics
676 * for doing this are straightforward. The lock state is held as
677 * AQS getState(). Read counts are negative; the write count (1)
678 * is positive. There are no signalling preferences among readers
679 * and writers. Since we don't need to export the full Lock API, we
680 * just override the minimal AQS methods and use them directly.
681 */
682 static final class TreeBin<V> extends AbstractQueuedSynchronizer {
683 private static final long serialVersionUID = 2249069246763182397L;
684 transient TreeNode<V> root; // root of tree
685 transient TreeNode<V> first; // head of next-pointer list
686
687 /* AQS overrides */
688 public final boolean isHeldExclusively() { return getState() > 0; }
689 public final boolean tryAcquire(int ignore) {
690 if (compareAndSetState(0, 1)) {
691 setExclusiveOwnerThread(Thread.currentThread());
692 return true;
693 }
694 return false;
695 }
696 public final boolean tryRelease(int ignore) {
697 setExclusiveOwnerThread(null);
698 setState(0);
699 return true;
700 }
701 public final int tryAcquireShared(int ignore) {
702 for (int c;;) {
703 if ((c = getState()) > 0)
704 return -1;
705 if (compareAndSetState(c, c - 1))
706 return 1;
707 }
708 }
709 public final boolean tryReleaseShared(int ignore) {
710 int c;
711 do {} while (!compareAndSetState(c = getState(), c + 1));
712 return c == -1;
713 }
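// Lock-state recap (see the class comment): state 0 is unlocked, 1 is
// write-locked, and -k means k readers hold the lock, which is why the
// shared paths above CAS to c - 1 on acquire and c + 1 on release.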
714
715 /** From CLR */
716 private void rotateLeft(TreeNode<V> p) {
717 if (p != null) {
718 TreeNode<V> r = p.right, pp, rl;
719 if ((rl = p.right = r.left) != null)
720 rl.parent = p;
721 if ((pp = r.parent = p.parent) == null)
722 root = r;
723 else if (pp.left == p)
724 pp.left = r;
725 else
726 pp.right = r;
727 r.left = p;
728 p.parent = r;
729 }
730 }
731
732 /** From CLR */
733 private void rotateRight(TreeNode<V> p) {
734 if (p != null) {
735 TreeNode<V> l = p.left, pp, lr;
736 if ((lr = p.left = l.right) != null)
737 lr.parent = p;
738 if ((pp = l.parent = p.parent) == null)
739 root = l;
740 else if (pp.right == p)
741 pp.right = l;
742 else
743 pp.left = l;
744 l.right = p;
745 p.parent = l;
746 }
747 }
748
749 /**
750 * Returns the TreeNode (or null if not found) for the given key
751 * starting at given root.
752 */
753 @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
754 (int h, Object k, TreeNode<V> p) {
755 Class<?> c = k.getClass();
756 while (p != null) {
757 int dir, ph; Object pk; Class<?> pc;
758 if ((ph = p.hash) == h) {
759 if ((pk = p.key) == k || k.equals(pk))
760 return p;
761 if (c != (pc = pk.getClass()) ||
762 !(k instanceof Comparable) ||
763 (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
764 if ((dir = (c == pc) ? 0 :
765 c.getName().compareTo(pc.getName())) == 0) {
766 TreeNode<V> r = null, pl, pr; // check both sides
767 if ((pr = p.right) != null && h >= pr.hash &&
768 (r = getTreeNode(h, k, pr)) != null)
769 return r;
770 else if ((pl = p.left) != null && h <= pl.hash)
771 dir = -1;
772 else // nothing there
773 return null;
774 }
775 }
776 }
777 else
778 dir = (h < ph) ? -1 : 1;
779 p = (dir > 0) ? p.right : p.left;
780 }
781 return null;
782 }
783
784 /**
785 * Wrapper for getTreeNode used by CHM.get. Tries to obtain
786 * read-lock to call getTreeNode, but during failure to get
787 * lock, searches along next links.
788 */
789 final V getValue(int h, Object k) {
790 Node<V> r = null;
791 int c = getState(); // Must read lock state first
792 for (Node<V> e = first; e != null; e = e.next) {
793 if (c <= 0 && compareAndSetState(c, c - 1)) {
794 try {
795 r = getTreeNode(h, k, root);
796 } finally {
797 releaseShared(0);
798 }
799 break;
800 }
801 else if (e.hash == h && k.equals(e.key)) {
802 r = e;
803 break;
804 }
805 else
806 c = getState();
807 }
808 return r == null ? null : r.val;
809 }
810
811 /**
812 * Finds or adds a node.
813 * @return null if added
814 */
815 @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
816 (int h, Object k, V v) {
817 Class<?> c = k.getClass();
818 TreeNode<V> pp = root, p = null;
819 int dir = 0;
820 while (pp != null) { // find existing node or leaf to insert at
821 int ph; Object pk; Class<?> pc;
822 p = pp;
823 if ((ph = p.hash) == h) {
824 if ((pk = p.key) == k || k.equals(pk))
825 return p;
826 if (c != (pc = pk.getClass()) ||
827 !(k instanceof Comparable) ||
828 (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
829 TreeNode<V> s = null, r = null, pr;
830 if ((dir = (c == pc) ? 0 :
831 c.getName().compareTo(pc.getName())) == 0) {
832 if ((pr = p.right) != null && h >= pr.hash &&
833 (r = getTreeNode(h, k, pr)) != null)
834 return r;
835 else // continue left
836 dir = -1;
837 }
838 else if ((pr = p.right) != null && h >= pr.hash)
839 s = pr;
840 if (s != null && (r = getTreeNode(h, k, s)) != null)
841 return r;
842 }
843 }
844 else
845 dir = (h < ph) ? -1 : 1;
846 pp = (dir > 0) ? p.right : p.left;
847 }
848
849 TreeNode<V> f = first;
850 TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
851 if (p == null)
852 root = x;
853 else { // attach and rebalance; adapted from CLR
854 TreeNode<V> xp, xpp;
855 if (f != null)
856 f.prev = x;
857 if (dir <= 0)
858 p.left = x;
859 else
860 p.right = x;
861 x.red = true;
862 while (x != null && (xp = x.parent) != null && xp.red &&
863 (xpp = xp.parent) != null) {
864 TreeNode<V> xppl = xpp.left;
865 if (xp == xppl) {
866 TreeNode<V> y = xpp.right;
867 if (y != null && y.red) {
868 y.red = false;
869 xp.red = false;
870 xpp.red = true;
871 x = xpp;
872 }
873 else {
874 if (x == xp.right) {
875 rotateLeft(x = xp);
876 xpp = (xp = x.parent) == null ? null : xp.parent;
877 }
878 if (xp != null) {
879 xp.red = false;
880 if (xpp != null) {
881 xpp.red = true;
882 rotateRight(xpp);
883 }
884 }
885 }
886 }
887 else {
888 TreeNode<V> y = xppl;
889 if (y != null && y.red) {
890 y.red = false;
891 xp.red = false;
892 xpp.red = true;
893 x = xpp;
894 }
895 else {
896 if (x == xp.left) {
897 rotateRight(x = xp);
898 xpp = (xp = x.parent) == null ? null : xp.parent;
899 }
900 if (xp != null) {
901 xp.red = false;
902 if (xpp != null) {
903 xpp.red = true;
904 rotateLeft(xpp);
905 }
906 }
907 }
908 }
909 }
910 TreeNode<V> r = root;
911 if (r != null && r.red)
912 r.red = false;
913 }
914 return null;
915 }
916
917 /**
918 * Removes the given node, which must be present before this
919 * call. This is messier than typical red-black deletion code
920 * because we cannot swap the contents of an interior node
921 * with a leaf successor that is pinned by "next" pointers
922 * that are accessible independently of lock. So instead we
923 * swap the tree linkages.
924 */
925 final void deleteTreeNode(TreeNode<V> p) {
926 TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
927 TreeNode<V> pred = p.prev;
928 if (pred == null)
929 first = next;
930 else
931 pred.next = next;
932 if (next != null)
933 next.prev = pred;
934 TreeNode<V> replacement;
935 TreeNode<V> pl = p.left;
936 TreeNode<V> pr = p.right;
937 if (pl != null && pr != null) {
938 TreeNode<V> s = pr, sl;
939 while ((sl = s.left) != null) // find successor
940 s = sl;
941 boolean c = s.red; s.red = p.red; p.red = c; // swap colors
942 TreeNode<V> sr = s.right;
943 TreeNode<V> pp = p.parent;
944 if (s == pr) { // p was s's direct parent
945 p.parent = s;
946 s.right = p;
947 }
948 else {
949 TreeNode<V> sp = s.parent;
950 if ((p.parent = sp) != null) {
951 if (s == sp.left)
952 sp.left = p;
953 else
954 sp.right = p;
955 }
956 if ((s.right = pr) != null)
957 pr.parent = s;
958 }
959 p.left = null;
960 if ((p.right = sr) != null)
961 sr.parent = p;
962 if ((s.left = pl) != null)
963 pl.parent = s;
964 if ((s.parent = pp) == null)
965 root = s;
966 else if (p == pp.left)
967 pp.left = s;
968 else
969 pp.right = s;
970 replacement = sr;
971 }
972 else
973 replacement = (pl != null) ? pl : pr;
974 TreeNode<V> pp = p.parent;
975 if (replacement == null) {
976 if (pp == null) {
977 root = null;
978 return;
979 }
980 replacement = p;
981 }
982 else {
983 replacement.parent = pp;
984 if (pp == null)
985 root = replacement;
986 else if (p == pp.left)
987 pp.left = replacement;
988 else
989 pp.right = replacement;
990 p.left = p.right = p.parent = null;
991 }
992 if (!p.red) { // rebalance, from CLR
993 TreeNode<V> x = replacement;
994 while (x != null) {
995 TreeNode<V> xp, xpl;
996 if (x.red || (xp = x.parent) == null) {
997 x.red = false;
998 break;
999 }
1000 if (x == (xpl = xp.left)) {
1001 TreeNode<V> sib = xp.right;
1002 if (sib != null && sib.red) {
1003 sib.red = false;
1004 xp.red = true;
1005 rotateLeft(xp);
1006 sib = (xp = x.parent) == null ? null : xp.right;
1007 }
1008 if (sib == null)
1009 x = xp;
1010 else {
1011 TreeNode<V> sl = sib.left, sr = sib.right;
1012 if ((sr == null || !sr.red) &&
1013 (sl == null || !sl.red)) {
1014 sib.red = true;
1015 x = xp;
1016 }
1017 else {
1018 if (sr == null || !sr.red) {
1019 if (sl != null)
1020 sl.red = false;
1021 sib.red = true;
1022 rotateRight(sib);
1023 sib = (xp = x.parent) == null ?
1024 null : xp.right;
1025 }
1026 if (sib != null) {
1027 sib.red = (xp == null) ? false : xp.red;
1028 if ((sr = sib.right) != null)
1029 sr.red = false;
1030 }
1031 if (xp != null) {
1032 xp.red = false;
1033 rotateLeft(xp);
1034 }
1035 x = root;
1036 }
1037 }
1038 }
1039 else { // symmetric
1040 TreeNode<V> sib = xpl;
1041 if (sib != null && sib.red) {
1042 sib.red = false;
1043 xp.red = true;
1044 rotateRight(xp);
1045 sib = (xp = x.parent) == null ? null : xp.left;
1046 }
1047 if (sib == null)
1048 x = xp;
1049 else {
1050 TreeNode<V> sl = sib.left, sr = sib.right;
1051 if ((sl == null || !sl.red) &&
1052 (sr == null || !sr.red)) {
1053 sib.red = true;
1054 x = xp;
1055 }
1056 else {
1057 if (sl == null || !sl.red) {
1058 if (sr != null)
1059 sr.red = false;
1060 sib.red = true;
1061 rotateLeft(sib);
1062 sib = (xp = x.parent) == null ?
1063 null : xp.left;
1064 }
1065 if (sib != null) {
1066 sib.red = (xp == null) ? false : xp.red;
1067 if ((sl = sib.left) != null)
1068 sl.red = false;
1069 }
1070 if (xp != null) {
1071 xp.red = false;
1072 rotateRight(xp);
1073 }
1074 x = root;
1075 }
1076 }
1077 }
1078 }
1079 }
1080 if (p == replacement && (pp = p.parent) != null) {
1081 if (p == pp.left) // detach pointers
1082 pp.left = null;
1083 else if (p == pp.right)
1084 pp.right = null;
1085 p.parent = null;
1086 }
1087 }
1088 }
1089
1090 /* ---------------- Collision reduction methods -------------- */
1091
1092 /**
1093 * Spreads higher bits to lower, and also forces top bit to 0.
1094 * Because the table uses power-of-two masking, sets of hashes
1095 * that vary only in bits above the current mask will always
1096 * collide. (Among known examples are sets of Float keys holding
1097 * consecutive whole numbers in small tables.) To counter this,
1098 * we apply a transform that spreads the impact of higher bits
1099 * downward. There is a tradeoff between speed, utility, and
1100 * quality of bit-spreading. Because many common sets of hashes
1101 * are already reasonably distributed across bits (so don't benefit
1102 * from spreading), and because we use trees to handle large sets
1103 * of collisions in bins, we don't need excessively high quality.
1104 */
1105 private static final int spread(int h) {
1106 h ^= (h >>> 18) ^ (h >>> 12);
1107 return (h ^ (h >>> 10)) & HASH_BITS;
1108 }
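// For example, Float keys holding consecutive whole numbers have hash
// codes differing only in high bits, so without this transform a small
// power-of-two mask would place them all in one bin. Masking with
// HASH_BITS also keeps results non-negative, reserving negative hash
// fields (MOVED) for forwarding nodes.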
1109
1110 /**
1111 * Replaces a list bin with a tree bin if key is comparable. Call
1112 * only when locked.
1113 */
1114 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1115 if (key instanceof Comparable) {
1116 TreeBin<V> t = new TreeBin<V>();
1117 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1118 t.putTreeNode(e.hash, e.key, e.val);
1119 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1120 }
1121 }
1122
1123 /* ---------------- Internal access and update methods -------------- */
1124
1125 /** Implementation for get and containsKey */
1126 @SuppressWarnings("unchecked") private final V internalGet(Object k) {
1127 int h = spread(k.hashCode());
1128 retry: for (Node<V>[] tab = table; tab != null;) {
1129 Node<V> e; Object ek; V ev; int eh; // locals to read fields once
1130 for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1131 if ((eh = e.hash) < 0) {
1132 if ((ek = e.key) instanceof TreeBin) // search TreeBin
1133 return ((TreeBin<V>)ek).getValue(h, k);
1134 else { // restart with new table
1135 tab = (Node<V>[])ek;
1136 continue retry;
1137 }
1138 }
1139 else if (eh == h && (ev = e.val) != null &&
1140 ((ek = e.key) == k || k.equals(ek)))
1141 return ev;
1142 }
1143 break;
1144 }
1145 return null;
1146 }
1147
1148 /**
1149 * Implementation for the four public remove/replace methods:
1150 * Replaces node value with v, conditional upon match of cv if
1151 * non-null. If resulting value is null, delete.
1152 */
1153 @SuppressWarnings("unchecked") private final V internalReplace
1154 (Object k, V v, Object cv) {
1155 int h = spread(k.hashCode());
1156 V oldVal = null;
1157 for (Node<V>[] tab = table;;) {
1158 Node<V> f; int i, fh; Object fk;
1159 if (tab == null ||
1160 (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1161 break;
1162 else if ((fh = f.hash) < 0) {
1163 if ((fk = f.key) instanceof TreeBin) {
1164 TreeBin<V> t = (TreeBin<V>)fk;
1165 boolean validated = false;
1166 boolean deleted = false;
1167 t.acquire(0);
1168 try {
1169 if (tabAt(tab, i) == f) {
1170 validated = true;
1171 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1172 if (p != null) {
1173 V pv = p.val;
1174 if (cv == null || cv == pv || cv.equals(pv)) {
1175 oldVal = pv;
1176 if ((p.val = v) == null) {
1177 deleted = true;
1178 t.deleteTreeNode(p);
1179 }
1180 }
1181 }
1182 }
1183 } finally {
1184 t.release(0);
1185 }
1186 if (validated) {
1187 if (deleted)
1188 addCount(-1L, -1);
1189 break;
1190 }
1191 }
1192 else
1193 tab = (Node<V>[])fk;
1194 }
1195 else if (fh != h && f.next == null) // precheck
1196 break; // rules out possible existence
1197 else {
1198 boolean validated = false;
1199 boolean deleted = false;
1200 synchronized (f) {
1201 if (tabAt(tab, i) == f) {
1202 validated = true;
1203 for (Node<V> e = f, pred = null;;) {
1204 Object ek; V ev;
1205 if (e.hash == h &&
1206 ((ev = e.val) != null) &&
1207 ((ek = e.key) == k || k.equals(ek))) {
1208 if (cv == null || cv == ev || cv.equals(ev)) {
1209 oldVal = ev;
1210 if ((e.val = v) == null) {
1211 deleted = true;
1212 Node<V> en = e.next;
1213 if (pred != null)
1214 pred.next = en;
1215 else
1216 setTabAt(tab, i, en);
1217 }
1218 }
1219 break;
1220 }
1221 pred = e;
1222 if ((e = e.next) == null)
1223 break;
1224 }
1225 }
1226 }
1227 if (validated) {
1228 if (deleted)
1229 addCount(-1L, -1);
1230 break;
1231 }
1232 }
1233 }
1234 return oldVal;
1235 }
1236
1237 /*
1238 * Internal versions of insertion methods
1239 * All have the same basic structure as the first (internalPut):
1240 * 1. If table uninitialized, create
1241 * 2. If bin empty, try to CAS new node
1242 * 3. If bin stale, use new table
1243 * 4. If bin converted to TreeBin, validate and relay to TreeBin methods
1244 * 5. Lock and validate; if valid, scan and add or update
1245 *
1246 * The putAll method differs mainly in attempting to pre-allocate
1247 * enough table space, and also more lazily performs count updates
1248 * and checks.
1249 *
1250 * Most of the function-accepting methods can't be factored nicely
1251 * because they require different functional forms, so instead
1252 * sprawl out similar mechanics.
1253 */
1254
1255 /** Implementation for put and putIfAbsent */
1256 @SuppressWarnings("unchecked") private final V internalPut
1257 (K k, V v, boolean onlyIfAbsent) {
1258 if (k == null || v == null) throw new NullPointerException();
1259 int h = spread(k.hashCode());
1260 int len = 0;
1261 for (Node<V>[] tab = table;;) {
1262 int i, fh; Node<V> f; Object fk; V fv;
1263 if (tab == null)
1264 tab = initTable();
1265 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1266 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
1267 break; // no lock when adding to empty bin
1268 }
1269 else if ((fh = f.hash) < 0) {
1270 if ((fk = f.key) instanceof TreeBin) {
1271 TreeBin<V> t = (TreeBin<V>)fk;
1272 V oldVal = null;
1273 t.acquire(0);
1274 try {
1275 if (tabAt(tab, i) == f) {
1276 len = 2;
1277 TreeNode<V> p = t.putTreeNode(h, k, v);
1278 if (p != null) {
1279 oldVal = p.val;
1280 if (!onlyIfAbsent)
1281 p.val = v;
1282 }
1283 }
1284 } finally {
1285 t.release(0);
1286 }
1287 if (len != 0) {
1288 if (oldVal != null)
1289 return oldVal;
1290 break;
1291 }
1292 }
1293 else
1294 tab = (Node<V>[])fk;
1295 }
1296 else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
1297 ((fk = f.key) == k || k.equals(fk))) // peek while nearby
1298 return fv;
1299 else {
1300 V oldVal = null;
1301 synchronized (f) {
1302 if (tabAt(tab, i) == f) {
1303 len = 1;
1304 for (Node<V> e = f;; ++len) {
1305 Object ek; V ev;
1306 if (e.hash == h &&
1307 (ev = e.val) != null &&
1308 ((ek = e.key) == k || k.equals(ek))) {
1309 oldVal = ev;
1310 if (!onlyIfAbsent)
1311 e.val = v;
1312 break;
1313 }
1314 Node<V> last = e;
1315 if ((e = e.next) == null) {
1316 last.next = new Node<V>(h, k, v, null);
1317 if (len >= TREE_THRESHOLD)
1318 replaceWithTreeBin(tab, i, k);
1319 break;
1320 }
1321 }
1322 }
1323 }
1324 if (len != 0) {
1325 if (oldVal != null)
1326 return oldVal;
1327 break;
1328 }
1329 }
1330 }
1331 addCount(1L, len);
1332 return null;
1333 }
1334
1335 /** Implementation for computeIfAbsent */
1336 @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
1337 (K k, Function<? super K, ? extends V> mf) {
1338 if (k == null || mf == null)
1339 throw new NullPointerException();
1340 int h = spread(k.hashCode());
1341 V val = null;
1342 int len = 0;
1343 for (Node<V>[] tab = table;;) {
1344 Node<V> f; int i; Object fk;
1345 if (tab == null)
1346 tab = initTable();
1347 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1348 Node<V> node = new Node<V>(h, k, null, null);
1349 synchronized (node) {
1350 if (casTabAt(tab, i, null, node)) {
1351 len = 1;
1352 try {
1353 if ((val = mf.apply(k)) != null)
1354 node.val = val;
1355 } finally {
1356 if (val == null)
1357 setTabAt(tab, i, null);
1358 }
1359 }
1360 }
1361 if (len != 0)
1362 break;
1363 }
1364 else if (f.hash < 0) {
1365 if ((fk = f.key) instanceof TreeBin) {
1366 TreeBin<V> t = (TreeBin<V>)fk;
1367 boolean added = false;
1368 t.acquire(0);
1369 try {
1370 if (tabAt(tab, i) == f) {
1371 len = 1;
1372 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1373 if (p != null)
1374 val = p.val;
1375 else if ((val = mf.apply(k)) != null) {
1376 added = true;
1377 len = 2;
1378 t.putTreeNode(h, k, val);
1379 }
1380 }
1381 } finally {
1382 t.release(0);
1383 }
1384 if (len != 0) {
1385 if (!added)
1386 return val;
1387 break;
1388 }
1389 }
1390 else
1391 tab = (Node<V>[])fk;
1392 }
1393 else {
1394 for (Node<V> e = f; e != null; e = e.next) { // prescan
1395 Object ek; V ev;
1396 if (e.hash == h && (ev = e.val) != null &&
1397 ((ek = e.key) == k || k.equals(ek)))
1398 return ev;
1399 }
1400 boolean added = false;
1401 synchronized (f) {
1402 if (tabAt(tab, i) == f) {
1403 len = 1;
1404 for (Node<V> e = f;; ++len) {
1405 Object ek; V ev;
1406 if (e.hash == h &&
1407 (ev = e.val) != null &&
1408 ((ek = e.key) == k || k.equals(ek))) {
1409 val = ev;
1410 break;
1411 }
1412 Node<V> last = e;
1413 if ((e = e.next) == null) {
1414 if ((val = mf.apply(k)) != null) {
1415 added = true;
1416 last.next = new Node<V>(h, k, val, null);
1417 if (len >= TREE_THRESHOLD)
1418 replaceWithTreeBin(tab, i, k);
1419 }
1420 break;
1421 }
1422 }
1423 }
1424 }
1425 if (len != 0) {
1426 if (!added)
1427 return val;
1428 break;
1429 }
1430 }
1431 }
1432 if (val != null)
1433 addCount(1L, len);
1434 return val;
1435 }
1436
1437 /** Implementation for compute */
1438 @SuppressWarnings("unchecked") private final V internalCompute
1439 (K k, boolean onlyIfPresent,
1440 BiFunction<? super K, ? super V, ? extends V> mf) {
1441 if (k == null || mf == null)
1442 throw new NullPointerException();
1443 int h = spread(k.hashCode());
1444 V val = null;
1445 int delta = 0;
1446 int len = 0;
1447 for (Node<V>[] tab = table;;) {
1448 Node<V> f; int i, fh; Object fk;
1449 if (tab == null)
1450 tab = initTable();
1451 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1452 if (onlyIfPresent)
1453 break;
1454 Node<V> node = new Node<V>(h, k, null, null);
1455 synchronized (node) {
1456 if (casTabAt(tab, i, null, node)) {
1457 try {
1458 len = 1;
1459 if ((val = mf.apply(k, null)) != null) {
1460 node.val = val;
1461 delta = 1;
1462 }
1463 } finally {
1464 if (delta == 0)
1465 setTabAt(tab, i, null);
1466 }
1467 }
1468 }
1469 if (len != 0)
1470 break;
1471 }
1472 else if ((fh = f.hash) < 0) {
1473 if ((fk = f.key) instanceof TreeBin) {
1474 TreeBin<V> t = (TreeBin<V>)fk;
1475 t.acquire(0);
1476 try {
1477 if (tabAt(tab, i) == f) {
1478 len = 1;
1479 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1480 if (p == null && onlyIfPresent)
1481 break;
1482 V pv = (p == null) ? null : p.val;
1483 if ((val = mf.apply(k, pv)) != null) {
1484 if (p != null)
1485 p.val = val;
1486 else {
1487 len = 2;
1488 delta = 1;
1489 t.putTreeNode(h, k, val);
1490 }
1491 }
1492 else if (p != null) {
1493 delta = -1;
1494 t.deleteTreeNode(p);
1495 }
1496 }
1497 } finally {
1498 t.release(0);
1499 }
1500 if (len != 0)
1501 break;
1502 }
1503 else
1504 tab = (Node<V>[])fk;
1505 }
1506 else {
1507 synchronized (f) {
1508 if (tabAt(tab, i) == f) {
1509 len = 1;
1510 for (Node<V> e = f, pred = null;; ++len) {
1511 Object ek; V ev;
1512 if (e.hash == h &&
1513 (ev = e.val) != null &&
1514 ((ek = e.key) == k || k.equals(ek))) {
1515 val = mf.apply(k, ev);
1516 if (val != null)
1517 e.val = val;
1518 else {
1519 delta = -1;
1520 Node<V> en = e.next;
1521 if (pred != null)
1522 pred.next = en;
1523 else
1524 setTabAt(tab, i, en);
1525 }
1526 break;
1527 }
1528 pred = e;
1529 if ((e = e.next) == null) {
1530 if (!onlyIfPresent &&
1531 (val = mf.apply(k, null)) != null) {
1532 pred.next = new Node<V>(h, k, val, null);
1533 delta = 1;
1534 if (len >= TREE_THRESHOLD)
1535 replaceWithTreeBin(tab, i, k);
1536 }
1537 break;
1538 }
1539 }
1540 }
1541 }
1542 if (len != 0)
1543 break;
1544 }
1545 }
1546 if (delta != 0)
1547 addCount((long)delta, len);
1548 return val;
1549 }
1550
1551 /** Implementation for merge */
1552 @SuppressWarnings("unchecked") private final V internalMerge
1553 (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
1554 if (k == null || v == null || mf == null)
1555 throw new NullPointerException();
1556 int h = spread(k.hashCode());
1557 V val = null;
1558 int delta = 0;
1559 int len = 0;
1560 for (Node<V>[] tab = table;;) {
1561 int i; Node<V> f; Object fk; V fv;
1562 if (tab == null)
1563 tab = initTable();
1564 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1565 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1566 delta = 1;
1567 val = v;
1568 break;
1569 }
1570 }
1571 else if (f.hash < 0) {
1572 if ((fk = f.key) instanceof TreeBin) {
1573 TreeBin<V> t = (TreeBin<V>)fk;
1574 t.acquire(0);
1575 try {
1576 if (tabAt(tab, i) == f) {
1577 len = 1;
1578 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1579 val = (p == null) ? v : mf.apply(p.val, v);
1580 if (val != null) {
1581 if (p != null)
1582 p.val = val;
1583 else {
1584 len = 2;
1585 delta = 1;
1586 t.putTreeNode(h, k, val);
1587 }
1588 }
1589 else if (p != null) {
1590 delta = -1;
1591 t.deleteTreeNode(p);
1592 }
1593 }
1594 } finally {
1595 t.release(0);
1596 }
1597 if (len != 0)
1598 break;
1599 }
1600 else
1601 tab = (Node<V>[])fk;
1602 }
1603 else {
1604 synchronized (f) {
1605 if (tabAt(tab, i) == f) {
1606 len = 1;
1607 for (Node<V> e = f, pred = null;; ++len) {
1608 Object ek; V ev;
1609 if (e.hash == h &&
1610 (ev = e.val) != null &&
1611 ((ek = e.key) == k || k.equals(ek))) {
1612 val = mf.apply(ev, v);
1613 if (val != null)
1614 e.val = val;
1615 else {
1616 delta = -1;
1617 Node<V> en = e.next;
1618 if (pred != null)
1619 pred.next = en;
1620 else
1621 setTabAt(tab, i, en);
1622 }
1623 break;
1624 }
1625 pred = e;
1626 if ((e = e.next) == null) {
1627 val = v;
1628 pred.next = new Node<V>(h, k, val, null);
1629 delta = 1;
1630 if (len >= TREE_THRESHOLD)
1631 replaceWithTreeBin(tab, i, k);
1632 break;
1633 }
1634 }
1635 }
1636 }
1637 if (len != 0)
1638 break;
1639 }
1640 }
1641 if (delta != 0)
1642 addCount((long)delta, len);
1643 return val;
1644 }
1645
1646 /** Implementation for putAll */
1647 @SuppressWarnings("unchecked") private final void internalPutAll
1648 (Map<? extends K, ? extends V> m) {
1649 tryPresize(m.size());
1650 long delta = 0L; // number of uncommitted additions
1651 boolean npe = false; // to throw exception on exit for nulls
1652 try { // to clean up counts on other exceptions
1653 for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
1654 Object k; V v;
1655 if (entry == null || (k = entry.getKey()) == null ||
1656 (v = entry.getValue()) == null) {
1657 npe = true;
1658 break;
1659 }
1660 int h = spread(k.hashCode());
1661 for (Node<V>[] tab = table;;) {
1662 int i; Node<V> f; int fh; Object fk;
1663 if (tab == null)
1664 tab = initTable();
1665 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1666 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1667 ++delta;
1668 break;
1669 }
1670 }
1671 else if ((fh = f.hash) < 0) {
1672 if ((fk = f.key) instanceof TreeBin) {
1673 TreeBin<V> t = (TreeBin<V>)fk;
1674 boolean validated = false;
1675 t.acquire(0);
1676 try {
1677 if (tabAt(tab, i) == f) {
1678 validated = true;
1679 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1680 if (p != null)
1681 p.val = v;
1682 else {
1683 t.putTreeNode(h, k, v);
1684 ++delta;
1685 }
1686 }
1687 } finally {
1688 t.release(0);
1689 }
1690 if (validated)
1691 break;
1692 }
1693 else
1694 tab = (Node<V>[])fk;
1695 }
1696 else {
1697 int len = 0;
1698 synchronized (f) {
1699 if (tabAt(tab, i) == f) {
1700 len = 1;
1701 for (Node<V> e = f;; ++len) {
1702 Object ek; V ev;
1703 if (e.hash == h &&
1704 (ev = e.val) != null &&
1705 ((ek = e.key) == k || k.equals(ek))) {
1706 e.val = v;
1707 break;
1708 }
1709 Node<V> last = e;
1710 if ((e = e.next) == null) {
1711 ++delta;
1712 last.next = new Node<V>(h, k, v, null);
1713 if (len >= TREE_THRESHOLD)
1714 replaceWithTreeBin(tab, i, k);
1715 break;
1716 }
1717 }
1718 }
1719 }
1720 if (len != 0) {
1721 if (len > 1)
1722 addCount(delta, len);
1723 break;
1724 }
1725 }
1726 }
1727 }
1728 } finally {
1729 if (delta != 0L)
1730 addCount(delta, 2);
1731 }
1732 if (npe)
1733 throw new NullPointerException();
1734 }
1735
1736 /**
1737 * Implementation for clear. Steps through each bin, removing all
1738 * nodes.
1739 */
1740 @SuppressWarnings("unchecked") private final void internalClear() {
1741 long delta = 0L; // negative number of deletions
1742 int i = 0;
1743 Node<V>[] tab = table;
1744 while (tab != null && i < tab.length) {
1745 Node<V> f = tabAt(tab, i);
1746 if (f == null)
1747 ++i;
1748 else if (f.hash < 0) {
1749 Object fk;
1750 if ((fk = f.key) instanceof TreeBin) {
1751 TreeBin<V> t = (TreeBin<V>)fk;
1752 t.acquire(0);
1753 try {
1754 if (tabAt(tab, i) == f) {
1755 for (Node<V> p = t.first; p != null; p = p.next) {
1756 if (p.val != null) { // (currently always true)
1757 p.val = null;
1758 --delta;
1759 }
1760 }
1761 t.first = null;
1762 t.root = null;
1763 ++i;
1764 }
1765 } finally {
1766 t.release(0);
1767 }
1768 }
1769 else
1770 tab = (Node<V>[])fk;
1771 }
1772 else {
1773 synchronized (f) {
1774 if (tabAt(tab, i) == f) {
1775 for (Node<V> e = f; e != null; e = e.next) {
1776 if (e.val != null) { // (currently always true)
1777 e.val = null;
1778 --delta;
1779 }
1780 }
1781 setTabAt(tab, i, null);
1782 ++i;
1783 }
1784 }
1785 }
1786 }
1787 if (delta != 0L)
1788 addCount(delta, -1);
1789 }
1790
1791 /* ---------------- Table Initialization and Resizing -------------- */
1792
1793 /**
1794 * Returns a power of two table size for the given desired capacity.
1795      * See Hacker's Delight, sec 3.2
1796 */
1797 private static final int tableSizeFor(int c) {
1798 int n = c - 1;
1799 n |= n >>> 1;
1800 n |= n >>> 2;
1801 n |= n >>> 4;
1802 n |= n >>> 8;
1803 n |= n >>> 16;
1804 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1805 }
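    // Worked example (illustrative): for c = 17, n starts at 16 and the
    // shift-or cascade smears its high bit into all lower positions to give
    // 31, so the method returns 32; c = 16 yields 16, and any c <= 1 yields 1.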
1806
1807 /**
1808 * Initializes table, using the size recorded in sizeCtl.
1809 */
1810 @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1811 Node<V>[] tab; int sc;
1812 while ((tab = table) == null) {
1813 if ((sc = sizeCtl) < 0)
1814 Thread.yield(); // lost initialization race; just spin
1815 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1816 try {
1817 if ((tab = table) == null) {
1818 int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1819 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1820 table = tab = (Node<V>[])tb;
1821 sc = n - (n >>> 2);
1822 }
1823 } finally {
1824 sizeCtl = sc;
1825 }
1826 break;
1827 }
1828 }
1829 return tab;
1830 }
1831
1832 /**
1833 * Adds to count, and if table is too small and not already
1834 * resizing, initiates transfer. If already resizing, helps
1835 * perform transfer if work is available. Rechecks occupancy
1836 * after a transfer to see if another resize is already needed
1837 * because resizings are lagging additions.
1838 *
1839 * @param x the count to add
1840 * @param check if <0, don't check resize, if <= 1 only check if uncontended
1841 */
1842 private final void addCount(long x, int check) {
1843 Cell[] as; long b, s;
1844 if ((as = counterCells) != null ||
1845 !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1846 CellHashCode hc; Cell a; long v; int m;
1847 boolean uncontended = true;
1848 if ((hc = threadCellHashCode.get()) == null ||
1849 as == null || (m = as.length - 1) < 0 ||
1850 (a = as[m & hc.code]) == null ||
1851 !(uncontended =
1852 U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1853 fullAddCount(x, hc, uncontended);
1854 return;
1855 }
1856 if (check <= 1)
1857 return;
1858 s = sumCount();
1859 }
1860 if (check >= 0) {
1861 Node<V>[] tab, nt; int sc;
1862 while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1863 tab.length < MAXIMUM_CAPACITY) {
1864 if (sc < 0) {
1865 if (sc == -1 || transferIndex <= transferOrigin ||
1866 (nt = nextTable) == null)
1867 break;
1868 if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1869 transfer(tab, nt);
1870 }
1871 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1872 transfer(tab, null);
1873 s = sumCount();
1874 }
1875 }
1876 }
1877
1878 /**
1879 * Tries to presize table to accommodate the given number of elements.
1880 *
1881 * @param size number of elements (doesn't need to be perfectly accurate)
1882 */
1883 @SuppressWarnings("unchecked") private final void tryPresize(int size) {
1884 int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1885 tableSizeFor(size + (size >>> 1) + 1);
1886 int sc;
1887 while ((sc = sizeCtl) >= 0) {
1888 Node<V>[] tab = table; int n;
1889 if (tab == null || (n = tab.length) == 0) {
1890 n = (sc > c) ? sc : c;
1891 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1892 try {
1893 if (table == tab) {
1894 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1895 table = (Node<V>[])tb;
1896 sc = n - (n >>> 2);
1897 }
1898 } finally {
1899 sizeCtl = sc;
1900 }
1901 }
1902 }
1903 else if (c <= sc || n >= MAXIMUM_CAPACITY)
1904 break;
1905 else if (tab == table &&
1906 U.compareAndSwapInt(this, SIZECTL, sc, -2))
1907 transfer(tab, null);
1908 }
1909 }
1910
1911 /*
1912 * Moves and/or copies the nodes in each bin to new table. See
1913 * above for explanation.
1914 */
1915 @SuppressWarnings("unchecked") private final void transfer
1916 (Node<V>[] tab, Node<V>[] nextTab) {
1917 int n = tab.length, stride;
1918 if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
1919 stride = MIN_TRANSFER_STRIDE; // subdivide range
1920 if (nextTab == null) { // initiating
1921 try {
1922 @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
1923 nextTab = (Node<V>[])tb;
1924 } catch (Throwable ex) { // try to cope with OOME
1925 sizeCtl = Integer.MAX_VALUE;
1926 return;
1927 }
1928 nextTable = nextTab;
1929 transferOrigin = n;
1930 transferIndex = n;
1931 Node<V> rev = new Node<V>(MOVED, tab, null, null);
1932 for (int k = n; k > 0;) { // progressively reveal ready slots
1933 int nextk = (k > stride) ? k - stride : 0;
1934 for (int m = nextk; m < k; ++m)
1935 nextTab[m] = rev;
1936 for (int m = n + nextk; m < n + k; ++m)
1937 nextTab[m] = rev;
1938 U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
1939 }
1940 }
1941 int nextn = nextTab.length;
1942 Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
1943 boolean advance = true;
1944 for (int i = 0, bound = 0;;) {
1945 int nextIndex, nextBound; Node<V> f; Object fk;
1946 while (advance) {
1947 if (--i >= bound)
1948 advance = false;
1949 else if ((nextIndex = transferIndex) <= transferOrigin) {
1950 i = -1;
1951 advance = false;
1952 }
1953 else if (U.compareAndSwapInt
1954 (this, TRANSFERINDEX, nextIndex,
1955 nextBound = (nextIndex > stride ?
1956 nextIndex - stride : 0))) {
1957 bound = nextBound;
1958 i = nextIndex - 1;
1959 advance = false;
1960 }
1961 }
1962 if (i < 0 || i >= n || i + n >= nextn) {
1963 for (int sc;;) {
1964 if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
1965 if (sc == -1) {
1966 nextTable = null;
1967 table = nextTab;
1968 sizeCtl = (n << 1) - (n >>> 1);
1969 }
1970 return;
1971 }
1972 }
1973 }
1974 else if ((f = tabAt(tab, i)) == null) {
1975 if (casTabAt(tab, i, null, fwd)) {
1976 setTabAt(nextTab, i, null);
1977 setTabAt(nextTab, i + n, null);
1978 advance = true;
1979 }
1980 }
1981 else if (f.hash >= 0) {
1982 synchronized (f) {
1983 if (tabAt(tab, i) == f) {
1984 int runBit = f.hash & n;
1985 Node<V> lastRun = f, lo = null, hi = null;
1986 for (Node<V> p = f.next; p != null; p = p.next) {
1987 int b = p.hash & n;
1988 if (b != runBit) {
1989 runBit = b;
1990 lastRun = p;
1991 }
1992 }
1993 if (runBit == 0)
1994 lo = lastRun;
1995 else
1996 hi = lastRun;
1997 for (Node<V> p = f; p != lastRun; p = p.next) {
1998 int ph = p.hash;
1999 Object pk = p.key; V pv = p.val;
2000 if ((ph & n) == 0)
2001 lo = new Node<V>(ph, pk, pv, lo);
2002 else
2003 hi = new Node<V>(ph, pk, pv, hi);
2004 }
2005 setTabAt(nextTab, i, lo);
2006 setTabAt(nextTab, i + n, hi);
2007 setTabAt(tab, i, fwd);
2008 advance = true;
2009 }
2010 }
2011 }
2012 else if ((fk = f.key) instanceof TreeBin) {
2013 TreeBin<V> t = (TreeBin<V>)fk;
2014 t.acquire(0);
2015 try {
2016 if (tabAt(tab, i) == f) {
2017 TreeBin<V> lt = new TreeBin<V>();
2018 TreeBin<V> ht = new TreeBin<V>();
2019 int lc = 0, hc = 0;
2020 for (Node<V> e = t.first; e != null; e = e.next) {
2021 int h = e.hash;
2022 Object k = e.key; V v = e.val;
2023 if ((h & n) == 0) {
2024 ++lc;
2025 lt.putTreeNode(h, k, v);
2026 }
2027 else {
2028 ++hc;
2029 ht.putTreeNode(h, k, v);
2030 }
2031 }
2032 Node<V> ln, hn; // throw away trees if too small
2033 if (lc < TREE_THRESHOLD) {
2034 ln = null;
2035 for (Node<V> p = lt.first; p != null; p = p.next)
2036 ln = new Node<V>(p.hash, p.key, p.val, ln);
2037 }
2038 else
2039 ln = new Node<V>(MOVED, lt, null, null);
2040 setTabAt(nextTab, i, ln);
2041 if (hc < TREE_THRESHOLD) {
2042 hn = null;
2043 for (Node<V> p = ht.first; p != null; p = p.next)
2044 hn = new Node<V>(p.hash, p.key, p.val, hn);
2045 }
2046 else
2047 hn = new Node<V>(MOVED, ht, null, null);
2048 setTabAt(nextTab, i + n, hn);
2049 setTabAt(tab, i, fwd);
2050 advance = true;
2051 }
2052 } finally {
2053 t.release(0);
2054 }
2055 }
2056 else
2057 advance = true; // already processed
2058 }
2059 }
2060
2061 /* ---------------- Counter support -------------- */
2062
2063 final long sumCount() {
2064 Cell[] as = counterCells; Cell a;
2065 long sum = baseCount;
2066 if (as != null) {
2067 for (int i = 0; i < as.length; ++i) {
2068 if ((a = as[i]) != null)
2069 sum += a.value;
2070 }
2071 }
2072 return sum;
2073 }
2074
2075 // See LongAdder version for explanation
2076 private final void fullAddCount(long x, CellHashCode hc,
2077 boolean wasUncontended) {
2078 int h;
2079 if (hc == null) {
2080 hc = new CellHashCode();
2081 int s = cellHashCodeGenerator.addAndGet(SEED_INCREMENT);
2082 h = hc.code = (s == 0) ? 1 : s; // Avoid zero
2083 threadCellHashCode.set(hc);
2084 }
2085 else
2086 h = hc.code;
2087 boolean collide = false; // True if last slot nonempty
2088 for (;;) {
2089 Cell[] as; Cell a; int n; long v;
2090 if ((as = counterCells) != null && (n = as.length) > 0) {
2091 if ((a = as[(n - 1) & h]) == null) {
2092 if (cellsBusy == 0) { // Try to attach new Cell
2093 Cell r = new Cell(x); // Optimistic create
2094 if (cellsBusy == 0 &&
2095 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2096 boolean created = false;
2097 try { // Recheck under lock
2098 Cell[] rs; int m, j;
2099 if ((rs = counterCells) != null &&
2100 (m = rs.length) > 0 &&
2101 rs[j = (m - 1) & h] == null) {
2102 rs[j] = r;
2103 created = true;
2104 }
2105 } finally {
2106 cellsBusy = 0;
2107 }
2108 if (created)
2109 break;
2110 continue; // Slot is now non-empty
2111 }
2112 }
2113 collide = false;
2114 }
2115 else if (!wasUncontended) // CAS already known to fail
2116 wasUncontended = true; // Continue after rehash
2117 else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2118 break;
2119 else if (counterCells != as || n >= NCPU)
2120 collide = false; // At max size or stale
2121 else if (!collide)
2122 collide = true;
2123 else if (cellsBusy == 0 &&
2124 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2125 try {
2126 if (counterCells == as) {// Expand table unless stale
2127 Cell[] rs = new Cell[n << 1];
2128 for (int i = 0; i < n; ++i)
2129 rs[i] = as[i];
2130 counterCells = rs;
2131 }
2132 } finally {
2133 cellsBusy = 0;
2134 }
2135 collide = false;
2136 continue; // Retry with expanded table
2137 }
2138 h ^= h << 13; // Rehash
2139 h ^= h >>> 17;
2140 h ^= h << 5;
2141 }
2142 else if (cellsBusy == 0 && counterCells == as &&
2143 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2144 boolean init = false;
2145 try { // Initialize table
2146 if (counterCells == as) {
2147 Cell[] rs = new Cell[2];
2148 rs[h & 1] = new Cell(x);
2149 counterCells = rs;
2150 init = true;
2151 }
2152 } finally {
2153 cellsBusy = 0;
2154 }
2155 if (init)
2156 break;
2157 }
2158 else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2159 break; // Fall back on using base
2160 }
2161 hc.code = h; // Record index for next time
2162 }
2163
2164 /* ----------------Table Traversal -------------- */
2165
2166 /**
2167 * Encapsulates traversal for methods such as containsValue; also
2168 * serves as a base class for other iterators and bulk tasks.
2169 *
2170 * At each step, the iterator snapshots the key ("nextKey") and
2171 * value ("nextVal") of a valid node (i.e., one that, at point of
2172 * snapshot, has a non-null user value). Because val fields can
2173 * change (including to null, indicating deletion), field nextVal
2174 * might not be accurate at point of use, but still maintains the
2175 * weak consistency property of holding a value that was once
2176 * valid. To support iterator.remove, the nextKey field is not
2177 * updated (nulled out) when the iterator cannot advance.
2178 *
2179 * Internal traversals directly access these fields, as in:
2180 * {@code while (it.advance() != null) { process(it.nextKey); }}
2181 *
2182 * Exported iterators must track whether the iterator has advanced
2183 * (in hasNext vs next) (by setting/checking/nulling field
2184 * nextVal), and then extract key, value, or key-value pairs as
2185 * return values of next().
2186 *
2187 * The iterator visits once each still-valid node that was
2188 * reachable upon iterator construction. It might miss some that
2189 * were added to a bin after the bin was visited, which is OK wrt
2190 * consistency guarantees. Maintaining this property in the face
2191 * of possible ongoing resizes requires a fair amount of
2192 * bookkeeping state that is difficult to optimize away amidst
2193 * volatile accesses. Even so, traversal maintains reasonable
2194 * throughput.
2195 *
2196 * Normally, iteration proceeds bin-by-bin traversing lists.
2197 * However, if the table has been resized, then all future steps
2198 * must traverse both the bin at the current index as well as at
2199 * (index + baseSize); and so on for further resizings. To
2200 * paranoically cope with potential sharing by users of iterators
2201      * across threads, iteration terminates if a bounds check fails
2202 * for a table read.
2203 *
2204 * This class supports both Spliterator-based traversal and
2205 * CountedCompleter-based bulk tasks. The same "batch" field is
2206 * used, but in slightly different ways, in the two cases. For
2207 * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2208 * estimate of element coverage. For CHM tasks, it is a pre-scaled
2209 * size that halves down to zero for leaf tasks, that is only
2210 * computed upon execution of the task. (Tasks can be submitted to
2211 * any pool, of any size, so we don't know scale factors until
2212 * running.)
2213 *
2214 * This class extends CountedCompleter to streamline parallel
2215 * iteration in bulk operations. This adds only a few fields of
2216      * space overhead, which is small enough to be negligible in
2217      * cases where it is not needed. Because CountedCompleter is
2218 * Serializable, but iterators need not be, we need to add warning
2219 * suppressions.
2220 */
2221 @SuppressWarnings("serial") static class Traverser<K,V,R>
2222 extends CountedCompleter<R> {
2223 final ConcurrentHashMap<K, V> map;
2224 Node<V> next; // the next entry to use
2225 Object nextKey; // cached key field of next
2226 V nextVal; // cached val field of next
2227 Node<V>[] tab; // current table; updated if resized
2228 int index; // index of bin to use next
2229 int baseIndex; // current index of initial table
2230 int baseLimit; // index bound for initial table
2231 int baseSize; // initial table size
2232 int batch; // split control
2233 /** Creates iterator for all entries in the table. */
2234 Traverser(ConcurrentHashMap<K, V> map) {
2235 this.map = map;
2236 Node<V>[] t;
2237 if ((t = tab = map.table) != null)
2238 baseLimit = baseSize = t.length;
2239 }
2240
2241 /** Task constructor */
2242 Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
2243 super(it);
2244 this.map = map;
2245 this.batch = batch; // -1 if unknown
2246 if (it == null) {
2247 Node<V>[] t;
2248 if ((t = tab = map.table) != null)
2249 baseLimit = baseSize = t.length;
2250 }
2251 else { // split parent
2252 this.tab = it.tab;
2253 this.baseSize = it.baseSize;
2254 int hi = this.baseLimit = it.baseLimit;
2255 it.baseLimit = this.index = this.baseIndex =
2256 (hi + it.baseIndex + 1) >>> 1;
2257 }
2258 }
2259
2260 /** Spliterator constructor */
2261 Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
2262 super(it);
2263 this.map = map;
2264 if (it == null) {
2265 Node<V>[] t;
2266 if ((t = tab = map.table) != null)
2267 baseLimit = baseSize = t.length;
2268 long n = map.sumCount();
2269 batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2270 (int)n);
2271 }
2272 else {
2273 this.tab = it.tab;
2274 this.baseSize = it.baseSize;
2275 int hi = this.baseLimit = it.baseLimit;
2276 it.baseLimit = this.index = this.baseIndex =
2277 (hi + it.baseIndex + 1) >>> 1;
2278 this.batch = it.batch >>>= 1;
2279 }
2280 }
2281
2282 /**
2283 * Advances next; returns nextVal or null if terminated.
2284 * See above for explanation.
2285 */
2286 @SuppressWarnings("unchecked") final V advance() {
2287 Node<V> e = next;
2288 V ev = null;
2289 outer: do {
2290 if (e != null) // advance past used/skipped node
2291 e = e.next;
2292 while (e == null) { // get to next non-null bin
2293 ConcurrentHashMap<K, V> m;
2294 Node<V>[] t; int b, i, n; Object ek; // must use locals
2295 if ((t = tab) != null)
2296 n = t.length;
2297 else if ((m = map) != null && (t = tab = m.table) != null)
2298 n = baseLimit = baseSize = t.length;
2299 else
2300 break outer;
2301 if ((b = baseIndex) >= baseLimit ||
2302 (i = index) < 0 || i >= n)
2303 break outer;
2304 if ((e = tabAt(t, i)) != null && e.hash < 0) {
2305 if ((ek = e.key) instanceof TreeBin)
2306 e = ((TreeBin<V>)ek).first;
2307 else {
2308 tab = (Node<V>[])ek;
2309 continue; // restarts due to null val
2310 }
2311 } // visit upper slots if present
2312 index = (i += baseSize) < n ? i : (baseIndex = b + 1);
2313 }
2314 nextKey = e.key;
2315 } while ((ev = e.val) == null); // skip deleted or special nodes
2316 next = e;
2317 return nextVal = ev;
2318 }
2319
2320 public final void remove() {
2321 Object k = nextKey;
2322 if (k == null && (advance() == null || (k = nextKey) == null))
2323 throw new IllegalStateException();
2324 map.internalReplace(k, null, null);
2325 }
2326
2327 public final boolean hasNext() {
2328 return nextVal != null || advance() != null;
2329 }
2330
2331 public final boolean hasMoreElements() { return hasNext(); }
2332
2333 public void compute() { } // default no-op CountedCompleter body
2334
2335 /**
2336          * Returns a batch value > 0 if this task should (and must) be
2337          * split; if so, adds to the pending count, and in any case
2338          * updates the batch value. The initial batch value is approx
2339 * exp2 of the number of times (minus one) to split task by
2340 * two before executing leaf action. This value is faster to
2341 * compute and more convenient to use as a guide to splitting
2342 * than is the depth, since it is used while dividing by two
2343 * anyway.
2344 */
2345 final int preSplit() {
2346 int b; ForkJoinPool pool;
2347 if ((b = batch) < 0) { // force initialization
2348 int sp = (((pool = getPool()) == null) ?
2349 ForkJoinPool.getCommonPoolParallelism() :
2350 pool.getParallelism()) << 3; // slack of 8
2351 long n = map.sumCount();
2352 b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2353 }
2354 b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
2355 if ((batch = b) > 0)
2356 addToPendingCount(1);
2357 return b;
2358 }
2359
2360 // spliterator support
2361
2362 public long exactSizeIfKnown() {
2363 return -1;
2364 }
2365
2366 public boolean hasExactSplits() {
2367 return false;
2368 }
2369
2370 public long estimateSize() {
2371 return batch;
2372 }
2373 }
2374
2375 /* ---------------- Public operations -------------- */
2376
2377 /**
2378 * Creates a new, empty map with the default initial table size (16).
2379 */
2380 public ConcurrentHashMap() {
2381 }
2382
2383 /**
2384 * Creates a new, empty map with an initial table size
2385 * accommodating the specified number of elements without the need
2386 * to dynamically resize.
2387 *
2388 * @param initialCapacity The implementation performs internal
2389 * sizing to accommodate this many elements.
2390 * @throws IllegalArgumentException if the initial capacity of
2391 * elements is negative
2392 */
2393 public ConcurrentHashMap(int initialCapacity) {
2394 if (initialCapacity < 0)
2395 throw new IllegalArgumentException();
2396 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2397 MAXIMUM_CAPACITY :
2398 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2399 this.sizeCtl = cap;
2400 }
2401
2402 /**
2403 * Creates a new map with the same mappings as the given map.
2404 *
2405 * @param m the map
2406 */
2407 public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
2408 this.sizeCtl = DEFAULT_CAPACITY;
2409 internalPutAll(m);
2410 }
2411
2412 /**
2413 * Creates a new, empty map with an initial table size based on
2414 * the given number of elements ({@code initialCapacity}) and
2415 * initial table density ({@code loadFactor}).
2416 *
2417 * @param initialCapacity the initial capacity. The implementation
2418 * performs internal sizing to accommodate this many elements,
2419 * given the specified load factor.
2420 * @param loadFactor the load factor (table density) for
2421 * establishing the initial table size
2422 * @throws IllegalArgumentException if the initial capacity of
2423 * elements is negative or the load factor is nonpositive
2424 *
2425 * @since 1.6
2426 */
2427 public ConcurrentHashMap(int initialCapacity, float loadFactor) {
2428 this(initialCapacity, loadFactor, 1);
2429 }
2430
2431 /**
2432 * Creates a new, empty map with an initial table size based on
2433 * the given number of elements ({@code initialCapacity}), table
2434 * density ({@code loadFactor}), and number of concurrently
2435 * updating threads ({@code concurrencyLevel}).
2436 *
2437 * @param initialCapacity the initial capacity. The implementation
2438 * performs internal sizing to accommodate this many elements,
2439 * given the specified load factor.
2440 * @param loadFactor the load factor (table density) for
2441 * establishing the initial table size
2442 * @param concurrencyLevel the estimated number of concurrently
2443 * updating threads. The implementation may use this value as
2444 * a sizing hint.
2445 * @throws IllegalArgumentException if the initial capacity is
2446 * negative or the load factor or concurrencyLevel are
2447 * nonpositive
2448 */
2449 public ConcurrentHashMap(int initialCapacity,
2450 float loadFactor, int concurrencyLevel) {
2451 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2452 throw new IllegalArgumentException();
2453 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2454 initialCapacity = concurrencyLevel; // as estimated threads
2455 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2456 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2457 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2458 this.sizeCtl = cap;
2459 }
2460
2461 /**
2462 * Creates a new {@link Set} backed by a ConcurrentHashMap
2463 * from the given type to {@code Boolean.TRUE}.
2464 *
2465 * @return the new set
2466 */
2467 public static <K> KeySetView<K,Boolean> newKeySet() {
2468 return new KeySetView<K,Boolean>(new ConcurrentHashMap<K,Boolean>(),
2469 Boolean.TRUE);
2470 }
2471
2472 /**
2473 * Creates a new {@link Set} backed by a ConcurrentHashMap
2474 * from the given type to {@code Boolean.TRUE}.
2475 *
2476 * @param initialCapacity The implementation performs internal
2477 * sizing to accommodate this many elements.
2478 * @throws IllegalArgumentException if the initial capacity of
2479 * elements is negative
2480 * @return the new set
2481 */
2482 public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2483 return new KeySetView<K,Boolean>
2484 (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2485 }
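    /*
     * Usage sketch (illustrative; "seen", "url" and "process" are
     * hypothetical names): a concurrent Set view backed by this map.
     *
     *   Set<String> seen = ConcurrentHashMap.newKeySet();
     *   if (seen.add(url))        // true only for the first thread to add url
     *       process(url);
     */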
2486
2487 /**
2488 * {@inheritDoc}
2489 */
2490 public boolean isEmpty() {
2491 return sumCount() <= 0L; // ignore transient negative values
2492 }
2493
2494 /**
2495 * {@inheritDoc}
2496 */
2497 public int size() {
2498 long n = sumCount();
2499 return ((n < 0L) ? 0 :
2500 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2501 (int)n);
2502 }
2503
2504 /**
2505 * Returns the number of mappings. This method should be used
2506 * instead of {@link #size} because a ConcurrentHashMap may
2507 * contain more mappings than can be represented as an int. The
2508 * value returned is an estimate; the actual count may differ if
2509 * there are concurrent insertions or removals.
2510 *
2511 * @return the number of mappings
2512 */
2513 public long mappingCount() {
2514 long n = sumCount();
2515 return (n < 0L) ? 0L : n; // ignore transient negative values
2516 }
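    /*
     * Usage note (illustrative; "map" is a hypothetical instance): prefer
     * mappingCount() when a map may hold more than Integer.MAX_VALUE
     * mappings.
     *
     *   long n = map.mappingCount();
     */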
2517
2518 /**
2519 * Returns the value to which the specified key is mapped,
2520 * or {@code null} if this map contains no mapping for the key.
2521 *
2522 * <p>More formally, if this map contains a mapping from a key
2523 * {@code k} to a value {@code v} such that {@code key.equals(k)},
2524 * then this method returns {@code v}; otherwise it returns
2525 * {@code null}. (There can be at most one such mapping.)
2526 *
2527 * @throws NullPointerException if the specified key is null
2528 */
2529 public V get(Object key) {
2530 return internalGet(key);
2531 }
2532
2533 /**
2534 * Returns the value to which the specified key is mapped,
2535 * or the given defaultValue if this map contains no mapping for the key.
2536 *
2537 * @param key the key
2538 * @param defaultValue the value to return if this map contains
2539 * no mapping for the given key
2540 * @return the mapping for the key, if present; else the defaultValue
2541 * @throws NullPointerException if the specified key is null
2542 */
2543 public V getValueOrDefault(Object key, V defaultValue) {
2544 V v;
2545 return (v = internalGet(key)) == null ? defaultValue : v;
2546 }
2547
2548 /**
2549 * Tests if the specified object is a key in this table.
2550 *
2551 * @param key possible key
2552 * @return {@code true} if and only if the specified object
2553 * is a key in this table, as determined by the
2554 * {@code equals} method; {@code false} otherwise
2555 * @throws NullPointerException if the specified key is null
2556 */
2557 public boolean containsKey(Object key) {
2558 return internalGet(key) != null;
2559 }
2560
2561 /**
2562 * Returns {@code true} if this map maps one or more keys to the
2563 * specified value. Note: This method may require a full traversal
2564 * of the map, and is much slower than method {@code containsKey}.
2565 *
2566 * @param value value whose presence in this map is to be tested
2567 * @return {@code true} if this map maps one or more keys to the
2568 * specified value
2569 * @throws NullPointerException if the specified value is null
2570 */
2571 public boolean containsValue(Object value) {
2572 if (value == null)
2573 throw new NullPointerException();
2574 V v;
2575 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2576 while ((v = it.advance()) != null) {
2577 if (v == value || value.equals(v))
2578 return true;
2579 }
2580 return false;
2581 }
2582
2583 /**
2584 * Legacy method testing if some key maps into the specified value
2585 * in this table. This method is identical in functionality to
2586 * {@link #containsValue}, and exists solely to ensure
2587 * full compatibility with class {@link java.util.Hashtable},
2588 * which supported this method prior to introduction of the
2589 * Java Collections framework.
2590 *
2591 * @param value a value to search for
2592 * @return {@code true} if and only if some key maps to the
2593 * {@code value} argument in this table as
2594 * determined by the {@code equals} method;
2595 * {@code false} otherwise
2596 * @throws NullPointerException if the specified value is null
2597 */
2598 @Deprecated public boolean contains(Object value) {
2599 return containsValue(value);
2600 }
2601
2602 /**
2603 * Maps the specified key to the specified value in this table.
2604 * Neither the key nor the value can be null.
2605 *
2606 * <p>The value can be retrieved by calling the {@code get} method
2607 * with a key that is equal to the original key.
2608 *
2609 * @param key key with which the specified value is to be associated
2610 * @param value value to be associated with the specified key
2611 * @return the previous value associated with {@code key}, or
2612 * {@code null} if there was no mapping for {@code key}
2613 * @throws NullPointerException if the specified key or value is null
2614 */
2615 public V put(K key, V value) {
2616 return internalPut(key, value, false);
2617 }
2618
2619 /**
2620 * {@inheritDoc}
2621 *
2622 * @return the previous value associated with the specified key,
2623 * or {@code null} if there was no mapping for the key
2624 * @throws NullPointerException if the specified key or value is null
2625 */
2626 public V putIfAbsent(K key, V value) {
2627 return internalPut(key, value, true);
2628 }
2629
2630 /**
2631 * Copies all of the mappings from the specified map to this one.
2632 * These mappings replace any mappings that this map had for any of the
2633 * keys currently in the specified map.
2634 *
2635 * @param m mappings to be stored in this map
2636 */
2637 public void putAll(Map<? extends K, ? extends V> m) {
2638 internalPutAll(m);
2639 }
2640
2641 /**
2642 * If the specified key is not already associated with a value (or
2643 * is mapped to {@code null}), attempts to compute its value using
2644 * the given mapping function and enters it into this map unless
2645 * {@code null}. The entire method invocation is performed
2646 * atomically, so the function is applied at most once per key.
2647 * Some attempted update operations on this map by other threads
2648 * may be blocked while computation is in progress, so the
2649 * computation should be short and simple, and must not attempt to
2650 * update any other mappings of this Map.
2651 *
2652 * @param key key with which the specified value is to be associated
2653 * @param mappingFunction the function to compute a value
2654 * @return the current (existing or computed) value associated with
2655 * the specified key, or null if the computed value is null
2656 * @throws NullPointerException if the specified key or mappingFunction
2657 * is null
2658 * @throws IllegalStateException if the computation detectably
2659 * attempts a recursive update to this map that would
2660 * otherwise never complete
2661 * @throws RuntimeException or Error if the mappingFunction does so,
2662 * in which case the mapping is left unestablished
2663 */
2664 public V computeIfAbsent
2665 (K key, Function<? super K, ? extends V> mappingFunction) {
2666 return internalComputeIfAbsent(key, mappingFunction);
2667 }
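    /*
     * Usage sketch (illustrative; "index", "word" and "doc" are hypothetical
     * names): installs a per-key container at most once, then uses it; the
     * inner list must itself be thread-safe if mutated concurrently.
     *
     *   ConcurrentHashMap<String, List<String>> index = new ConcurrentHashMap<>();
     *   index.computeIfAbsent(word, k -> new CopyOnWriteArrayList<String>()).add(doc);
     */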
2668
2669 /**
2670 * If the value for the specified key is present and non-null,
2671 * attempts to compute a new mapping given the key and its current
2672 * mapped value. The entire method invocation is performed
2673 * atomically. Some attempted update operations on this map by
2674 * other threads may be blocked while computation is in progress,
2675 * so the computation should be short and simple, and must not
2676 * attempt to update any other mappings of this Map.
2677 *
2678 * @param key key with which the specified value is to be associated
2679 * @param remappingFunction the function to compute a value
2680 * @return the new value associated with the specified key, or null if none
2681 * @throws NullPointerException if the specified key or remappingFunction
2682 * is null
2683 * @throws IllegalStateException if the computation detectably
2684 * attempts a recursive update to this map that would
2685 * otherwise never complete
2686 * @throws RuntimeException or Error if the remappingFunction does so,
2687 * in which case the mapping is unchanged
2688 */
2689 public V computeIfPresent
2690 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2691 return internalCompute(key, true, remappingFunction);
2692 }
2693
2694 /**
2695 * Attempts to compute a mapping for the specified key and its
2696 * current mapped value (or {@code null} if there is no current
2697 * mapping). The entire method invocation is performed atomically.
2698 * Some attempted update operations on this map by other threads
2699 * may be blocked while computation is in progress, so the
2700 * computation should be short and simple, and must not attempt to
2701 * update any other mappings of this Map.
2702 *
2703 * @param key key with which the specified value is to be associated
2704 * @param remappingFunction the function to compute a value
2705 * @return the new value associated with the specified key, or null if none
2706 * @throws NullPointerException if the specified key or remappingFunction
2707 * is null
2708 * @throws IllegalStateException if the computation detectably
2709 * attempts a recursive update to this map that would
2710 * otherwise never complete
2711 * @throws RuntimeException or Error if the remappingFunction does so,
2712 * in which case the mapping is unchanged
2713 */
2714 public V compute
2715 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2716 return internalCompute(key, false, remappingFunction);
2717 }
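    /*
     * Usage sketch (illustrative; "counts" and "key" are hypothetical names):
     * atomically creates or increments a per-key counter.
     *
     *   ConcurrentHashMap<String,Long> counts = new ConcurrentHashMap<>();
     *   counts.compute(key, (k, v) -> (v == null) ? 1L : v + 1L);
     */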
2718
2719 /**
2720 * If the specified key is not already associated with a
2721 * (non-null) value, associates it with the given value.
2722 * Otherwise, replaces the value with the results of the given
2723 * remapping function, or removes if {@code null}. The entire
2724 * method invocation is performed atomically. Some attempted
2725 * update operations on this map by other threads may be blocked
2726 * while computation is in progress, so the computation should be
2727 * short and simple, and must not attempt to update any other
2728 * mappings of this Map.
2729 *
2730 * @param key key with which the specified value is to be associated
2731 * @param value the value to use if absent
2732 * @param remappingFunction the function to recompute a value if present
2733 * @return the new value associated with the specified key, or null if none
2734 * @throws NullPointerException if the specified key or the
2735 * remappingFunction is null
2736 * @throws RuntimeException or Error if the remappingFunction does so,
2737 * in which case the mapping is unchanged
2738 */
2739 public V merge
2740 (K key, V value,
2741 BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2742 return internalMerge(key, value, remappingFunction);
2743 }
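    /*
     * Usage sketch (illustrative; "totals", "account" and "amount" are
     * hypothetical names): adds amount to any existing total, or installs
     * amount as the initial value.
     *
     *   ConcurrentHashMap<String,Long> totals = new ConcurrentHashMap<>();
     *   totals.merge(account, amount, (oldV, add) -> oldV + add);
     */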
2744
2745 /**
2746 * Removes the key (and its corresponding value) from this map.
2747 * This method does nothing if the key is not in the map.
2748 *
2749 * @param key the key that needs to be removed
2750 * @return the previous value associated with {@code key}, or
2751 * {@code null} if there was no mapping for {@code key}
2752 * @throws NullPointerException if the specified key is null
2753 */
2754 public V remove(Object key) {
2755 return internalReplace(key, null, null);
2756 }
2757
2758 /**
2759 * {@inheritDoc}
2760 *
2761 * @throws NullPointerException if the specified key is null
2762 */
2763 public boolean remove(Object key, Object value) {
2764 return value != null && internalReplace(key, null, value) != null;
2765 }
2766
2767 /**
2768 * {@inheritDoc}
2769 *
2770 * @throws NullPointerException if any of the arguments are null
2771 */
2772 public boolean replace(K key, V oldValue, V newValue) {
2773 if (key == null || oldValue == null || newValue == null)
2774 throw new NullPointerException();
2775 return internalReplace(key, newValue, oldValue) != null;
2776 }
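    /*
     * Usage sketch (illustrative; "map", "key" and "transform" are
     * hypothetical): optimistically transforms an existing value, retrying
     * if another thread replaced it in the meantime.
     *
     *   for (V old; (old = map.get(key)) != null; ) {
     *       if (map.replace(key, old, transform(old)))
     *           break;
     *   }
     */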
2777
2778 /**
2779 * {@inheritDoc}
2780 *
2781 * @return the previous value associated with the specified key,
2782 * or {@code null} if there was no mapping for the key
2783 * @throws NullPointerException if the specified key or value is null
2784 */
2785 public V replace(K key, V value) {
2786 if (key == null || value == null)
2787 throw new NullPointerException();
2788 return internalReplace(key, value, null);
2789 }
2790
2791 /**
2792 * Removes all of the mappings from this map.
2793 */
2794 public void clear() {
2795 internalClear();
2796 }
2797
2798 /**
2799 * Returns a {@link Set} view of the keys contained in this map.
2800 * The set is backed by the map, so changes to the map are
2801 * reflected in the set, and vice-versa.
2802 *
2803 * @return the set view
2804 */
2805 public KeySetView<K,V> keySet() {
2806 KeySetView<K,V> ks = keySet;
2807 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2808 }
2809
2810 /**
2811 * Returns a {@link Set} view of the keys in this map, using the
2812 * given common mapped value for any additions (i.e., {@link
2813 * Collection#add} and {@link Collection#addAll}). This is of
2814 * course only appropriate if it is acceptable to use the same
2815 * value for all additions from this view.
2816 *
2817 * @param mappedValue the mapped value to use for any
2818 * additions.
2819 * @return the set view
2820 * @throws NullPointerException if the mappedValue is null
2821 */
2822 public KeySetView<K,V> keySet(V mappedValue) {
2823 if (mappedValue == null)
2824 throw new NullPointerException();
2825 return new KeySetView<K,V>(this, mappedValue);
2826 }
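    /*
     * Usage sketch (illustrative; "names" and "name" are hypothetical):
     * additions through the view insert the given common value.
     *
     *   ConcurrentHashMap<String,Boolean> map = new ConcurrentHashMap<>();
     *   Set<String> names = map.keySet(Boolean.TRUE);
     *   names.add(name);    // adds name as a key mapped to Boolean.TRUE
     */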
2827
2828 /**
2829 * Returns a {@link Collection} view of the values contained in this map.
2830 * The collection is backed by the map, so changes to the map are
2831 * reflected in the collection, and vice-versa.
2832 */
2833 public ValuesView<K,V> values() {
2834 ValuesView<K,V> vs = values;
2835 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2836 }
2837
2838 /**
2839 * Returns a {@link Set} view of the mappings contained in this map.
2840 * The set is backed by the map, so changes to the map are
2841 * reflected in the set, and vice-versa. The set supports element
2842 * removal, which removes the corresponding mapping from the map,
2843 * via the {@code Iterator.remove}, {@code Set.remove},
2844 * {@code removeAll}, {@code retainAll}, and {@code clear}
2845 * operations. It does not support the {@code add} or
2846 * {@code addAll} operations.
2847 *
2848 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2849 * that will never throw {@link ConcurrentModificationException},
2850 * and guarantees to traverse elements as they existed upon
2851 * construction of the iterator, and may (but is not guaranteed to)
2852 * reflect any modifications subsequent to construction.
2853 */
2854 public Set<Map.Entry<K,V>> entrySet() {
2855 EntrySetView<K,V> es = entrySet;
2856 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2857 }
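    /*
     * Usage sketch (illustrative; "map" is a hypothetical instance):
     * iteration is weakly consistent, so entries may be removed while
     * traversing without risking ConcurrentModificationException.
     *
     *   for (Map.Entry<String,Long> e : map.entrySet()) {
     *       if (e.getValue() == 0L)
     *           map.remove(e.getKey(), e.getValue());
     *   }
     */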
2858
2859 /**
2860 * Returns an enumeration of the keys in this table.
2861 *
2862 * @return an enumeration of the keys in this table
2863 * @see #keySet()
2864 */
2865 public Enumeration<K> keys() {
2866 return new KeyIterator<K,V>(this);
2867 }
2868
2869 /**
2870 * Returns an enumeration of the values in this table.
2871 *
2872 * @return an enumeration of the values in this table
2873 * @see #values()
2874 */
2875 public Enumeration<V> elements() {
2876 return new ValueIterator<K,V>(this);
2877 }
2878
2879 /**
2880 * Returns the hash code value for this {@link Map}, i.e.,
2881 * the sum of, for each key-value pair in the map,
2882 * {@code key.hashCode() ^ value.hashCode()}.
2883 *
2884 * @return the hash code value for this map
2885 */
2886 public int hashCode() {
2887 int h = 0;
2888 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2889 V v;
2890 while ((v = it.advance()) != null) {
2891 h += it.nextKey.hashCode() ^ v.hashCode();
2892 }
2893 return h;
2894 }
2895
2896 /**
2897 * Returns a string representation of this map. The string
2898 * representation consists of a list of key-value mappings (in no
2899 * particular order) enclosed in braces ("{@code {}}"). Adjacent
2900 * mappings are separated by the characters {@code ", "} (comma
2901 * and space). Each key-value mapping is rendered as the key
2902 * followed by an equals sign ("{@code =}") followed by the
2903 * associated value.
2904 *
2905 * @return a string representation of this map
2906 */
2907 public String toString() {
2908 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2909 StringBuilder sb = new StringBuilder();
2910 sb.append('{');
2911 V v;
2912 if ((v = it.advance()) != null) {
2913 for (;;) {
2914 Object k = it.nextKey;
2915 sb.append(k == this ? "(this Map)" : k);
2916 sb.append('=');
2917 sb.append(v == this ? "(this Map)" : v);
2918 if ((v = it.advance()) == null)
2919 break;
2920 sb.append(',').append(' ');
2921 }
2922 }
2923 return sb.append('}').toString();
2924 }
2925
2926 /**
2927 * Compares the specified object with this map for equality.
2928 * Returns {@code true} if the given object is a map with the same
2929 * mappings as this map. This operation may return misleading
2930 * results if either map is concurrently modified during execution
2931 * of this method.
2932 *
2933 * @param o object to be compared for equality with this map
2934 * @return {@code true} if the specified object is equal to this map
2935 */
2936 public boolean equals(Object o) {
2937 if (o != this) {
2938 if (!(o instanceof Map))
2939 return false;
2940 Map<?,?> m = (Map<?,?>) o;
2941 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2942 V val;
2943 while ((val = it.advance()) != null) {
2944 Object v = m.get(it.nextKey);
2945 if (v == null || (v != val && !v.equals(val)))
2946 return false;
2947 }
2948 for (Map.Entry<?,?> e : m.entrySet()) {
2949 Object mk, mv, v;
2950 if ((mk = e.getKey()) == null ||
2951 (mv = e.getValue()) == null ||
2952 (v = internalGet(mk)) == null ||
2953 (mv != v && !mv.equals(v)))
2954 return false;
2955 }
2956 }
2957 return true;
2958 }
2959
2960 /* ----------------Iterators -------------- */
2961
2962 @SuppressWarnings("serial") static final class KeyIterator<K,V>
2963 extends Traverser<K,V,Object>
2964 implements Spliterator<K>, Iterator<K>, Enumeration<K> {
2965 KeyIterator(ConcurrentHashMap<K, V> map) { super(map); }
2966 KeyIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
2967 super(map, it);
2968 }
2969 public KeyIterator<K,V> trySplit() {
2970 if (tab != null && baseIndex == baseLimit)
2971 return null;
2972 return new KeyIterator<K,V>(map, this);
2973 }
2974 @SuppressWarnings("unchecked") public final K next() {
2975 if (nextVal == null && advance() == null)
2976 throw new NoSuchElementException();
2977 Object k = nextKey;
2978 nextVal = null;
2979 return (K) k;
2980 }
2981
2982 public final K nextElement() { return next(); }
2983
2984 public Iterator<K> iterator() { return this; }
2985
2986 public void forEach(Block<? super K> action) {
2987 if (action == null) throw new NullPointerException();
2988 while (advance() != null)
2989 action.accept((K)nextKey);
2990 }
2991 }
2992
2993 @SuppressWarnings("serial") static final class ValueIterator<K,V>
2994 extends Traverser<K,V,Object>
2995 implements Spliterator<V>, Iterator<V>, Enumeration<V> {
2996 ValueIterator(ConcurrentHashMap<K, V> map) { super(map); }
2997 ValueIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
2998 super(map, it);
2999 }
3000 public ValueIterator<K,V> trySplit() {
3001 if (tab != null && baseIndex == baseLimit)
3002 return null;
3003 return new ValueIterator<K,V>(map, this);
3004 }
3005
3006 public final V next() {
3007 V v;
3008 if ((v = nextVal) == null && (v = advance()) == null)
3009 throw new NoSuchElementException();
3010 nextVal = null;
3011 return v;
3012 }
3013
3014 public final V nextElement() { return next(); }
3015
3016 public Iterator<V> iterator() { return this; }
3017
3018 public void forEach(Block<? super V> action) {
3019 if (action == null) throw new NullPointerException();
3020 V v;
3021 while ((v = advance()) != null)
3022 action.accept(v);
3023 }
3024 }
3025
3026 @SuppressWarnings("serial") static final class EntryIterator<K,V>
3027 extends Traverser<K,V,Object>
3028 implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
3029 EntryIterator(ConcurrentHashMap<K, V> map) { super(map); }
3030 EntryIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3031 super(map, it);
3032 }
3033 public EntryIterator<K,V> trySplit() {
3034 if (tab != null && baseIndex == baseLimit)
3035 return null;
3036 return new EntryIterator<K,V>(map, this);
3037 }
3038
3039 @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
3040 V v;
3041 if ((v = nextVal) == null && (v = advance()) == null)
3042 throw new NoSuchElementException();
3043 Object k = nextKey;
3044 nextVal = null;
3045 return new MapEntry<K,V>((K)k, v, map);
3046 }
3047
3048 public Iterator<Map.Entry<K,V>> iterator() { return this; }
3049
3050 public void forEach(Block<? super Map.Entry<K,V>> action) {
3051 if (action == null) throw new NullPointerException();
3052 V v;
3053 while ((v = advance()) != null)
3054 action.accept(entryFor((K)nextKey, v));
3055 }
3056 }
3057
3058 /**
3059 * Exported Entry for iterators
3060 */
3061 static final class MapEntry<K,V> implements Map.Entry<K, V> {
3062 final K key; // non-null
3063 V val; // non-null
3064 final ConcurrentHashMap<K, V> map;
3065 MapEntry(K key, V val, ConcurrentHashMap<K, V> map) {
3066 this.key = key;
3067 this.val = val;
3068 this.map = map;
3069 }
3070 public final K getKey() { return key; }
3071 public final V getValue() { return val; }
3072 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3073 public final String toString(){ return key + "=" + val; }
3074
3075 public final boolean equals(Object o) {
3076 Object k, v; Map.Entry<?,?> e;
3077 return ((o instanceof Map.Entry) &&
3078 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3079 (v = e.getValue()) != null &&
3080 (k == key || k.equals(key)) &&
3081 (v == val || v.equals(val)));
3082 }
3083
3084 /**
3085 * Sets our entry's value and writes through to the map. The
3086 * value to return is somewhat arbitrary here. Since we do not
3087 * necessarily track asynchronous changes, the most recent
3088 * "previous" value could be different from what we return (or
3089 * could even have been removed in which case the put will
3090 * re-establish). We do not and cannot guarantee more.
3091 */
3092 public final V setValue(V value) {
3093 if (value == null) throw new NullPointerException();
3094 V v = val;
3095 val = value;
3096 map.put(key, value);
3097 return v;
3098 }
3099 }
3100
3101 /**
3102 * Returns exportable snapshot entry for the given key and value
3103 * when write-through can't or shouldn't be used.
3104 */
3105 static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3106 return new AbstractMap.SimpleEntry<K,V>(k, v);
3107 }
3108
3109 /* ---------------- Serialization Support -------------- */
3110
3111 /**
3112 * Stripped-down version of helper class used in previous version,
3113 * declared for the sake of serialization compatibility
3114 */
3115 static class Segment<K,V> implements Serializable {
3116 private static final long serialVersionUID = 2249069246763182397L;
3117 final float loadFactor;
3118 Segment(float lf) { this.loadFactor = lf; }
3119 }
3120
3121 /**
3122 * Saves the state of the {@code ConcurrentHashMap} instance to a
3123 * stream (i.e., serializes it).
3124 * @param s the stream
3125 * @serialData
3126 * the key (Object) and value (Object)
3127 * for each key-value mapping, followed by a null pair.
3128 * The key-value mappings are emitted in no particular order.
3129 */
3130 @SuppressWarnings("unchecked") private void writeObject
3131 (java.io.ObjectOutputStream s)
3132 throws java.io.IOException {
3133 if (segments == null) { // for serialization compatibility
3134 segments = (Segment<K,V>[])
3135 new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3136 for (int i = 0; i < segments.length; ++i)
3137 segments[i] = new Segment<K,V>(LOAD_FACTOR);
3138 }
3139 s.defaultWriteObject();
3140 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3141 V v;
3142 while ((v = it.advance()) != null) {
3143 s.writeObject(it.nextKey);
3144 s.writeObject(v);
3145 }
3146 s.writeObject(null);
3147 s.writeObject(null);
3148 segments = null; // throw away
3149 }
3150
3151 /**
3152 * Reconstitutes the instance from a stream (that is, deserializes it).
3153 * @param s the stream
3154 */
3155 @SuppressWarnings("unchecked") private void readObject
3156 (java.io.ObjectInputStream s)
3157 throws java.io.IOException, ClassNotFoundException {
3158 s.defaultReadObject();
3159 this.segments = null; // unneeded
3160
3161 // Create all nodes, then place in table once size is known
3162 long size = 0L;
3163 Node<V> p = null;
3164 for (;;) {
3165 K k = (K) s.readObject();
3166 V v = (V) s.readObject();
3167 if (k != null && v != null) {
3168 int h = spread(k.hashCode());
3169 p = new Node<V>(h, k, v, p);
3170 ++size;
3171 }
3172 else
3173 break;
3174 }
3175 if (p != null) {
3176 boolean init = false;
3177 int n;
3178 if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3179 n = MAXIMUM_CAPACITY;
3180 else {
3181 int sz = (int)size;
3182 n = tableSizeFor(sz + (sz >>> 1) + 1);
3183 }
3184 int sc = sizeCtl;
3185 boolean collide = false;
3186 if (n > sc &&
3187 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3188 try {
3189 if (table == null) {
3190 init = true;
3191 @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3192 Node<V>[] tab = (Node<V>[])rt;
3193 int mask = n - 1;
3194 while (p != null) {
3195 int j = p.hash & mask;
3196 Node<V> next = p.next;
3197 Node<V> q = p.next = tabAt(tab, j);
3198 setTabAt(tab, j, p);
3199 if (!collide && q != null && q.hash == p.hash)
3200 collide = true;
3201 p = next;
3202 }
3203 table = tab;
3204 addCount(size, -1);
3205 sc = n - (n >>> 2);
3206 }
3207 } finally {
3208 sizeCtl = sc;
3209 }
3210 if (collide) { // rescan and convert to TreeBins
3211 Node<V>[] tab = table;
3212 for (int i = 0; i < tab.length; ++i) {
3213 int c = 0;
3214 for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3215 if (++c > TREE_THRESHOLD &&
3216 (e.key instanceof Comparable)) {
3217 replaceWithTreeBin(tab, i, e.key);
3218 break;
3219 }
3220 }
3221 }
3222 }
3223 }
3224 if (!init) { // Can only happen if unsafely published.
3225 while (p != null) {
3226 internalPut((K)p.key, p.val, false);
3227 p = p.next;
3228 }
3229 }
3230 }
3231 }
3232
3233 // -------------------------------------------------------
3234
3235 // Sequential bulk operations
3236
3237 /**
3238 * Performs the given action for each (key, value).
3239 *
3240 * @param action the action
3241 */
3242 @SuppressWarnings("unchecked") public void forEachSequentially
3243 (BiBlock<? super K, ? super V> action) {
3244 if (action == null) throw new NullPointerException();
3245 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3246 V v;
3247 while ((v = it.advance()) != null)
3248 action.accept((K)it.nextKey, v);
3249 }
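    /*
     * Usage sketch (illustrative; "map" is a hypothetical instance):
     *
     *   map.forEachSequentially((k, v) -> System.out.println(k + "=" + v));
     */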
3250
3251 /**
3252 * Performs the given action for each non-null transformation
3253 * of each (key, value).
3254 *
3255 * @param transformer a function returning the transformation
3256      * for an element, or null if there is no transformation (in
3257 * which case the action is not applied).
3258 * @param action the action
3259 */
3260 @SuppressWarnings("unchecked") public <U> void forEachSequentially
3261 (BiFunction<? super K, ? super V, ? extends U> transformer,
3262 Block<? super U> action) {
3263 if (transformer == null || action == null)
3264 throw new NullPointerException();
3265 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3266 V v; U u;
3267 while ((v = it.advance()) != null) {
3268 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3269 action.accept(u);
3270 }
3271 }
3272
3273 /**
3274 * Returns a non-null result from applying the given search
3275 * function on each (key, value), or null if none.
3276 *
3277 * @param searchFunction a function returning a non-null
3278 * result on success, else null
3279 * @return a non-null result from applying the given search
3280 * function on each (key, value), or null if none
3281 */
3282 @SuppressWarnings("unchecked") public <U> U searchSequentially
3283 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3284 if (searchFunction == null) throw new NullPointerException();
3285 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3286 V v; U u;
3287 while ((v = it.advance()) != null) {
3288 if ((u = searchFunction.apply((K)it.nextKey, v)) != null)
3289 return u;
3290 }
3291 return null;
3292 }
3293
3294 /**
3295 * Returns the result of accumulating the given transformation
3296 * of all (key, value) pairs using the given reducer to
3297 * combine values, or null if none.
3298 *
3299 * @param transformer a function returning the transformation
3300      * for an element, or null if there is no transformation (in
3301 * which case it is not combined).
3302 * @param reducer a commutative associative combining function
3303 * @return the result of accumulating the given transformation
3304 * of all (key, value) pairs
3305 */
3306 @SuppressWarnings("unchecked") public <U> U reduceSequentially
3307 (BiFunction<? super K, ? super V, ? extends U> transformer,
3308 BiFunction<? super U, ? super U, ? extends U> reducer) {
3309 if (transformer == null || reducer == null)
3310 throw new NullPointerException();
3311 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3312 U r = null, u; V v;
3313 while ((v = it.advance()) != null) {
3314 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3315 r = (r == null) ? u : reducer.apply(r, u);
3316 }
3317 return r;
3318 }
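    /*
     * Usage sketch (illustrative; "map" is a hypothetical instance): picks
     * the longest "key=value" rendering, or null if the map is empty.
     *
     *   String longest = map.reduceSequentially(
     *       (k, v) -> k + "=" + v,
     *       (a, b) -> a.length() >= b.length() ? a : b);
     */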
3319
3320 /**
3321 * Returns the result of accumulating the given transformation
3322 * of all (key, value) pairs using the given reducer to
3323 * combine values, and the given basis as an identity value.
3324 *
3325 * @param transformer a function returning the transformation
3326 * for an element
3327 * @param basis the identity (initial default value) for the reduction
3328 * @param reducer a commutative associative combining function
3329 * @return the result of accumulating the given transformation
3330 * of all (key, value) pairs
3331 */
3332 @SuppressWarnings("unchecked") public double reduceToDoubleSequentially
3333 (DoubleBiFunction<? super K, ? super V> transformer,
3334 double basis,
3335 DoubleBinaryOperator reducer) {
3336 if (transformer == null || reducer == null)
3337 throw new NullPointerException();
3338 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3339 double r = basis; V v;
3340 while ((v = it.advance()) != null)
3341 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey, v));
3342 return r;
3343 }
3344
3345 /**
3346 * Returns the result of accumulating the given transformation
3347 * of all (key, value) pairs using the given reducer to
3348 * combine values, and the given basis as an identity value.
3349 *
3350 * @param transformer a function returning the transformation
3351 * for an element
3352 * @param basis the identity (initial default value) for the reduction
3353 * @param reducer a commutative associative combining function
3354 * @return the result of accumulating the given transformation
3355 * of all (key, value) pairs
3356 */
3357 @SuppressWarnings("unchecked") public long reduceToLongSequentially
3358 (LongBiFunction<? super K, ? super V> transformer,
3359 long basis,
3360 LongBinaryOperator reducer) {
3361 if (transformer == null || reducer == null)
3362 throw new NullPointerException();
3363 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3364 long r = basis; V v;
3365 while ((v = it.advance()) != null)
3366 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey, v));
3367 return r;
3368 }
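// Usage sketch (exposition only; "counts" is a hypothetical
// ConcurrentHashMap<String,Long>): sums all values starting from the basis 0L;
// the lambdas match the LongBiFunction transformer and LongBinaryOperator reducer:
//
//   long total = counts.reduceToLongSequentially(
//       (k, v) -> v,        // transformer: (key, value) -> long
//       0L,                 // basis (identity for addition)
//       (a, b) -> a + b);   // reducer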
3369
3370 /**
3371 * Returns the result of accumulating the given transformation
3372 * of all (key, value) pairs using the given reducer to
3373 * combine values, and the given basis as an identity value.
3374 *
3375 * @param transformer a function returning the transformation
3376 * for an element
3377 * @param basis the identity (initial default value) for the reduction
3378 * @param reducer a commutative associative combining function
3379 * @return the result of accumulating the given transformation
3380 * of all (key, value) pairs
3381 */
3382 @SuppressWarnings("unchecked") public int reduceToIntSequentially
3383 (IntBiFunction<? super K, ? super V> transformer,
3384 int basis,
3385 IntBinaryOperator reducer) {
3386 if (transformer == null || reducer == null)
3387 throw new NullPointerException();
3388 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3389 int r = basis; V v;
3390 while ((v = it.advance()) != null)
3391 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey, v));
3392 return r;
3393 }
3394
3395 /**
3396 * Performs the given action for each key.
3397 *
3398 * @param action the action
3399 */
3400 @SuppressWarnings("unchecked") public void forEachKeySequentially
3401 (Block<? super K> action) {
3402 if (action == null) throw new NullPointerException();
3403 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3404 while (it.advance() != null)
3405 action.accept((K)it.nextKey);
3406 }
3407
3408 /**
3409 * Performs the given action for each non-null transformation
3410 * of each key.
3411 *
3412 * @param transformer a function returning the transformation
3413 * for an element, or null if there is no transformation (in
3414 * which case the action is not applied).
3415 * @param action the action
3416 */
3417 @SuppressWarnings("unchecked") public <U> void forEachKeySequentially
3418 (Function<? super K, ? extends U> transformer,
3419 Block<? super U> action) {
3420 if (transformer == null || action == null)
3421 throw new NullPointerException();
3422 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3423 U u;
3424 while (it.advance() != null) {
3425 if ((u = transformer.apply((K)it.nextKey)) != null)
3426 action.accept(u);
3427 }
3430 }
3431
3432 /**
3433 * Returns a non-null result from applying the given search
3434 * function on each key, or null if none.
3435 *
3436 * @param searchFunction a function returning a non-null
3437 * result on success, else null
3438 * @return a non-null result from applying the given search
3439 * function on each key, or null if none
3440 */
3441 @SuppressWarnings("unchecked") public <U> U searchKeysSequentially
3442 (Function<? super K, ? extends U> searchFunction) {
if (searchFunction == null) throw new NullPointerException();
3443 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3444 U u;
3445 while (it.advance() != null) {
3446 if ((u = searchFunction.apply((K)it.nextKey)) != null)
3447 return u;
3448 }
3449 return null;
3450 }
3451
3452 /**
3453 * Returns the result of accumulating all keys using the given
3454 * reducer to combine values, or null if none.
3455 *
3456 * @param reducer a commutative associative combining function
3457 * @return the result of accumulating all keys using the given
3458 * reducer to combine values, or null if none
3459 */
3460 @SuppressWarnings("unchecked") public K reduceKeysSequentially
3461 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3462 if (reducer == null) throw new NullPointerException();
3463 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3464 K r = null;
3465 while (it.advance() != null) {
3466 K u = (K)it.nextKey;
3467 r = (r == null) ? u : reducer.apply(r, u);
3468 }
3469 return r;
3470 }
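// Usage sketch (exposition only; names hypothetical): reduces the key set to
// a single representative, here the lexicographically smallest key, or null
// when the map is empty:
//
//   String first = map.reduceKeysSequentially(
//       (a, b) -> a.compareTo(b) <= 0 ? a : b);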
3471
3472 /**
3473 * Returns the result of accumulating the given transformation
3474 * of all keys using the given reducer to combine values, or
3475 * null if none.
3476 *
3477 * @param transformer a function returning the transformation
3478 * for an element, or null if there is no transformation (in
3479 * which case it is not combined).
3480 * @param reducer a commutative associative combining function
3481 * @return the result of accumulating the given transformation
3482 * of all keys
3483 */
3484 @SuppressWarnings("unchecked") public <U> U reduceKeysSequentially
3485 (Function<? super K, ? extends U> transformer,
3486 BiFunction<? super U, ? super U, ? extends U> reducer) {
3487 if (transformer == null || reducer == null)
3488 throw new NullPointerException();
3489 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3490 U r = null, u;
3491 while (it.advance() != null) {
3492 if ((u = transformer.apply((K)it.nextKey)) != null)
3493 r = (r == null) ? u : reducer.apply(r, u);
3494 }
3495 return r;
3496 }
3497
3498 /**
3499 * Returns the result of accumulating the given transformation
3500 * of all keys using the given reducer to combine values, and
3501 * the given basis as an identity value.
3502 *
3503 * @param transformer a function returning the transformation
3504 * for an element
3505 * @param basis the identity (initial default value) for the reduction
3506 * @param reducer a commutative associative combining function
3507 * @return the result of accumulating the given transformation
3508 * of all keys
3509 */
3510 @SuppressWarnings("unchecked") public double reduceKeysToDoubleSequentially
3511 (DoubleFunction<? super K> transformer,
3512 double basis,
3513 DoubleBinaryOperator reducer) {
3514 if (transformer == null || reducer == null)
3515 throw new NullPointerException();
3516 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3517 double r = basis;
3518 while (it.advance() != null)
3519 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey));
3520 return r;
3521 }
3522
3523 /**
3524 * Returns the result of accumulating the given transformation
3525 * of all keys using the given reducer to combine values, and
3526 * the given basis as an identity value.
3527 *
3528 * @param transformer a function returning the transformation
3529 * for an element
3530 * @param basis the identity (initial default value) for the reduction
3531 * @param reducer a commutative associative combining function
3532 * @return the result of accumulating the given transformation
3533 * of all keys
3534 */
3535 @SuppressWarnings("unchecked") public long reduceKeysToLongSequentially
3536 (LongFunction<? super K> transformer,
3537 long basis,
3538 LongBinaryOperator reducer) {
3539 if (transformer == null || reducer == null)
3540 throw new NullPointerException();
3541 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3542 long r = basis;
3543 while (it.advance() != null)
3544 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey));
3545 return r;
3546 }
3547
3548 /**
3549 * Returns the result of accumulating the given transformation
3550 * of all keys using the given reducer to combine values, and
3551 * the given basis as an identity value.
3552 *
3553 * @param transformer a function returning the transformation
3554 * for an element
3555 * @param basis the identity (initial default value) for the reduction
3556 * @param reducer a commutative associative combining function
3557 * @return the result of accumulating the given transformation
3558 * of all keys
3559 */
3560 @SuppressWarnings("unchecked") public int reduceKeysToIntSequentially
3561 (IntFunction<? super K> transformer,
3562 int basis,
3563 IntBinaryOperator reducer) {
3564 if (transformer == null || reducer == null)
3565 throw new NullPointerException();
3566 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3567 int r = basis;
3568 while (it.advance() != null)
3569 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey));
3570 return r;
3571 }
3572
3573 /**
3574 * Performs the given action for each value.
3575 *
3576 * @param action the action
3577 */
3578 public void forEachValueSequentially(Block<? super V> action) {
3579 if (action == null) throw new NullPointerException();
3580 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3581 V v;
3582 while ((v = it.advance()) != null)
3583 action.accept(v);
3584 }
3585
3586 /**
3587 * Performs the given action for each non-null transformation
3588 * of each value.
3589 *
3590 * @param transformer a function returning the transformation
3591 * for an element, or null if there is no transformation (in
3592 * which case the action is not applied).
* @param action the action
3593 */
3594 public <U> void forEachValueSequentially
3595 (Function<? super V, ? extends U> transformer,
3596 Block<? super U> action) {
3597 if (transformer == null || action == null)
3598 throw new NullPointerException();
3599 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3600 V v; U u;
3601 while ((v = it.advance()) != null) {
3602 if ((u = transformer.apply(v)) != null)
3603 action.accept(u);
3604 }
3605 }
3606
3607 /**
3608 * Returns a non-null result from applying the given search
3609 * function on each value, or null if none.
3610 *
3611 * @param searchFunction a function returning a non-null
3612 * result on success, else null
3613 * @return a non-null result from applying the given search
3614 * function on each value, or null if none
3615 */
3616 public <U> U searchValuesSequentially
3617 (Function<? super V, ? extends U> searchFunction) {
3618 if (searchFunction == null) throw new NullPointerException();
3619 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3620 V v; U u;
3621 while ((v = it.advance()) != null) {
3622 if ((u = searchFunction.apply(v)) != null)
3623 return u;
3624 }
3625 return null;
3626 }
3627
3628 /**
3629 * Returns the result of accumulating all values using the
3630 * given reducer to combine values, or null if none.
3631 *
3632 * @param reducer a commutative associative combining function
3633 * @return the result of accumulating all values
3634 */
3635 public V reduceValuesSequentially
3636 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3637 if (reducer == null) throw new NullPointerException();
3638 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3639 V r = null; V v;
3640 while ((v = it.advance()) != null)
3641 r = (r == null) ? v : reducer.apply(r, v);
3642 return r;
3643 }
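// Usage sketch (exposition only; "counts" hypothetical): finds the largest
// value, or null if the map is empty:
//
//   Long max = counts.reduceValuesSequentially((a, b) -> a >= b ? a : b);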
3644
3645 /**
3646 * Returns the result of accumulating the given transformation
3647 * of all values using the given reducer to combine values, or
3648 * null if none.
3649 *
3650 * @param transformer a function returning the transformation
3651 * for an element, or null if there is no transformation (in
3652 * which case it is not combined).
3653 * @param reducer a commutative associative combining function
3654 * @return the result of accumulating the given transformation
3655 * of all values
3656 */
3657 public <U> U reduceValuesSequentially
3658 (Function<? super V, ? extends U> transformer,
3659 BiFunction<? super U, ? super U, ? extends U> reducer) {
3660 if (transformer == null || reducer == null)
3661 throw new NullPointerException();
3662 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3663 U r = null, u; V v;
3664 while ((v = it.advance()) != null) {
3665 if ((u = transformer.apply(v)) != null)
3666 r = (r == null) ? u : reducer.apply(r, u);
3667 }
3668 return r;
3669 }
3670
3671 /**
3672 * Returns the result of accumulating the given transformation
3673 * of all values using the given reducer to combine values,
3674 * and the given basis as an identity value.
3675 *
3676 * @param transformer a function returning the transformation
3677 * for an element
3678 * @param basis the identity (initial default value) for the reduction
3679 * @param reducer a commutative associative combining function
3680 * @return the result of accumulating the given transformation
3681 * of all values
3682 */
3683 public double reduceValuesToDoubleSequentially
3684 (DoubleFunction<? super V> transformer,
3685 double basis,
3686 DoubleBinaryOperator reducer) {
3687 if (transformer == null || reducer == null)
3688 throw new NullPointerException();
3689 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3690 double r = basis; V v;
3691 while ((v = it.advance()) != null)
3692 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3693 return r;
3694 }
3695
3696 /**
3697 * Returns the result of accumulating the given transformation
3698 * of all values using the given reducer to combine values,
3699 * and the given basis as an identity value.
3700 *
3701 * @param transformer a function returning the transformation
3702 * for an element
3703 * @param basis the identity (initial default value) for the reduction
3704 * @param reducer a commutative associative combining function
3705 * @return the result of accumulating the given transformation
3706 * of all values
3707 */
3708 public long reduceValuesToLongSequentially
3709 (LongFunction<? super V> transformer,
3710 long basis,
3711 LongBinaryOperator reducer) {
3712 if (transformer == null || reducer == null)
3713 throw new NullPointerException();
3714 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3715 long r = basis; V v;
3716 while ((v = it.advance()) != null)
3717 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3718 return r;
3719 }
3720
3721 /**
3722 * Returns the result of accumulating the given transformation
3723 * of all values using the given reducer to combine values,
3724 * and the given basis as an identity value.
3725 *
3726 * @param transformer a function returning the transformation
3727 * for an element
3728 * @param basis the identity (initial default value) for the reduction
3729 * @param reducer a commutative associative combining function
3730 * @return the result of accumulating the given transformation
3731 * of all values
3732 */
3733 public int reduceValuesToIntSequentially
3734 (IntFunction<? super V> transformer,
3735 int basis,
3736 IntBinaryOperator reducer) {
3737 if (transformer == null || reducer == null)
3738 throw new NullPointerException();
3739 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3740 int r = basis; V v;
3741 while ((v = it.advance()) != null)
3742 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3743 return r;
3744 }
3745
3746 /**
3747 * Performs the given action for each entry.
3748 *
3749 * @param action the action
3750 */
3751 @SuppressWarnings("unchecked") public void forEachEntrySequentially
3752 (Block<? super Map.Entry<K,V>> action) {
3753 if (action == null) throw new NullPointerException();
3754 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3755 V v;
3756 while ((v = it.advance()) != null)
3757 action.accept(entryFor((K)it.nextKey, v));
3758 }
3759
3760 /**
3761 * Performs the given action for each non-null transformation
3762 * of each entry.
3763 *
3764 * @param transformer a function returning the transformation
3765 * for an element, or null if there is no transformation (in
3766 * which case the action is not applied).
3767 * @param action the action
3768 */
3769 @SuppressWarnings("unchecked") public <U> void forEachEntrySequentially
3770 (Function<Map.Entry<K,V>, ? extends U> transformer,
3771 Block<? super U> action) {
3772 if (transformer == null || action == null)
3773 throw new NullPointerException();
3774 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3775 V v; U u;
3776 while ((v = it.advance()) != null) {
3777 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3778 action.accept(u);
3779 }
3780 }
3781
3782 /**
3783 * Returns a non-null result from applying the given search
3784 * function on each entry, or null if none.
3785 *
3786 * @param searchFunction a function returning a non-null
3787 * result on success, else null
3788 * @return a non-null result from applying the given search
3789 * function on each entry, or null if none
3790 */
3791 @SuppressWarnings("unchecked") public <U> U searchEntriesSequentially
3792 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3793 if (searchFunction == null) throw new NullPointerException();
3794 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3795 V v; U u;
3796 while ((v = it.advance()) != null) {
3797 if ((u = searchFunction.apply(entryFor((K)it.nextKey, v))) != null)
3798 return u;
3799 }
3800 return null;
3801 }
3802
3803 /**
3804 * Returns the result of accumulating all entries using the
3805 * given reducer to combine values, or null if none.
3806 *
3807 * @param reducer a commutative associative combining function
3808 * @return the result of accumulating all entries
3809 */
3810 @SuppressWarnings("unchecked") public Map.Entry<K,V> reduceEntriesSequentially
3811 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3812 if (reducer == null) throw new NullPointerException();
3813 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3814 Map.Entry<K,V> r = null; V v;
3815 while ((v = it.advance()) != null) {
3816 Map.Entry<K,V> u = entryFor((K)it.nextKey, v);
3817 r = (r == null) ? u : reducer.apply(r, u);
3818 }
3819 return r;
3820 }
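// Usage sketch (exposition only; "counts" hypothetical): picks the entry with
// the greatest value, or null when the map is empty:
//
//   Map.Entry<String,Long> top = counts.reduceEntriesSequentially(
//       (e1, e2) -> e1.getValue() >= e2.getValue() ? e1 : e2);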
3821
3822 /**
3823 * Returns the result of accumulating the given transformation
3824 * of all entries using the given reducer to combine values,
3825 * or null if none.
3826 *
3827 * @param transformer a function returning the transformation
3828 * for an element, or null if there is no transformation (in
3829 * which case it is not combined).
3830 * @param reducer a commutative associative combining function
3831 * @return the result of accumulating the given transformation
3832 * of all entries
3833 */
3834 @SuppressWarnings("unchecked") public <U> U reduceEntriesSequentially
3835 (Function<Map.Entry<K,V>, ? extends U> transformer,
3836 BiFunction<? super U, ? super U, ? extends U> reducer) {
3837 if (transformer == null || reducer == null)
3838 throw new NullPointerException();
3839 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3840 U r = null, u; V v;
3841 while ((v = it.advance()) != null) {
3842 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3843 r = (r == null) ? u : reducer.apply(r, u);
3844 }
3845 return r;
3846 }
3847
3848 /**
3849 * Returns the result of accumulating the given transformation
3850 * of all entries using the given reducer to combine values,
3851 * and the given basis as an identity value.
3852 *
3853 * @param transformer a function returning the transformation
3854 * for an element
3855 * @param basis the identity (initial default value) for the reduction
3856 * @param reducer a commutative associative combining function
3857 * @return the result of accumulating the given transformation
3858 * of all entries
3859 */
3860 @SuppressWarnings("unchecked") public double reduceEntriesToDoubleSequentially
3861 (DoubleFunction<Map.Entry<K,V>> transformer,
3862 double basis,
3863 DoubleBinaryOperator reducer) {
3864 if (transformer == null || reducer == null)
3865 throw new NullPointerException();
3866 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3867 double r = basis; V v;
3868 while ((v = it.advance()) != null)
3869 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)it.nextKey, v)));
3870 return r;
3871 }
3872
3873 /**
3874 * Returns the result of accumulating the given transformation
3875 * of all entries using the given reducer to combine values,
3876 * and the given basis as an identity value.
3877 *
3878 * @param transformer a function returning the transformation
3879 * for an element
3880 * @param basis the identity (initial default value) for the reduction
3881 * @param reducer a commutative associative combining function
3882 * @return the result of accumulating the given transformation
3883 * of all entries
3884 */
3885 @SuppressWarnings("unchecked") public long reduceEntriesToLongSequentially
3886 (LongFunction<Map.Entry<K,V>> transformer,
3887 long basis,
3888 LongBinaryOperator reducer) {
3889 if (transformer == null || reducer == null)
3890 throw new NullPointerException();
3891 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3892 long r = basis; V v;
3893 while ((v = it.advance()) != null)
3894 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)it.nextKey, v)));
3895 return r;
3896 }
3897
3898 /**
3899 * Returns the result of accumulating the given transformation
3900 * of all entries using the given reducer to combine values,
3901 * and the given basis as an identity value.
3902 *
3903 * @param transformer a function returning the transformation
3904 * for an element
3905 * @param basis the identity (initial default value) for the reduction
3906 * @param reducer a commutative associative combining function
3907 * @return the result of accumulating the given transformation
3908 * of all entries
3909 */
3910 @SuppressWarnings("unchecked") public int reduceEntriesToIntSequentially
3911 (IntFunction<Map.Entry<K,V>> transformer,
3912 int basis,
3913 IntBinaryOperator reducer) {
3914 if (transformer == null || reducer == null)
3915 throw new NullPointerException();
3916 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3917 int r = basis; V v;
3918 while ((v = it.advance()) != null)
3919 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)it.nextKey, v)));
3920 return r;
3921 }
3922
3923 // Parallel bulk operations
3924
3925 /**
3926 * Performs the given action for each (key, value).
3927 *
3928 * @param action the action
3929 */
3930 public void forEachInParallel(BiBlock<? super K,? super V> action) {
3931 ForkJoinTasks.forEach
3932 (this, action).invoke();
3933 }
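// Usage sketch (exposition only; names hypothetical): the action may run in
// multiple ForkJoin worker threads at once, so it must be thread-safe and
// should not rely on encounter order:
//
//   map.forEachInParallel((k, v) -> System.out.println(k + " -> " + v));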
3934
3935 /**
3936 * Performs the given action for each non-null transformation
3937 * of each (key, value).
3938 *
3939 * @param transformer a function returning the transformation
3940 * for an element, or null if there is no transformation (in
3941 * which case the action is not applied).
3942 * @param action the action
3943 */
3944 public <U> void forEachInParallel
3945 (BiFunction<? super K, ? super V, ? extends U> transformer,
3946 Block<? super U> action) {
3947 ForkJoinTasks.forEach
3948 (this, transformer, action).invoke();
3949 }
3950
3951 /**
3952 * Returns a non-null result from applying the given search
3953 * function on each (key, value), or null if none. Upon
3954 * success, further element processing is suppressed and the
3955 * results of any other parallel invocations of the search
3956 * function are ignored.
3957 *
3958 * @param searchFunction a function returning a non-null
3959 * result on success, else null
3960 * @return a non-null result from applying the given search
3961 * function on each (key, value), or null if none
3962 */
3963 public <U> U searchInParallel
3964 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3965 return ForkJoinTasks.search
3966 (this, searchFunction).invoke();
3967 }
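// Usage sketch (exposition only; names hypothetical): probes entries in
// parallel; once any invocation returns a non-null result, remaining
// processing is abandoned and that result is returned:
//
//   String hit = scores.searchInParallel((k, v) -> v > 90 ? k : null);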
3968
3969 /**
3970 * Returns the result of accumulating the given transformation
3971 * of all (key, value) pairs using the given reducer to
3972 * combine values, or null if none.
3973 *
3974 * @param transformer a function returning the transformation
3975 * for an element, or null if there is no transformation (in
3976 * which case it is not combined).
3977 * @param reducer a commutative associative combining function
3978 * @return the result of accumulating the given transformation
3979 * of all (key, value) pairs
3980 */
3981 public <U> U reduceInParallel
3982 (BiFunction<? super K, ? super V, ? extends U> transformer,
3983 BiFunction<? super U, ? super U, ? extends U> reducer) {
3984 return ForkJoinTasks.reduce
3985 (this, transformer, reducer).invoke();
3986 }
3987
3988 /**
3989 * Returns the result of accumulating the given transformation
3990 * of all (key, value) pairs using the given reducer to
3991 * combine values, and the given basis as an identity value.
3992 *
3993 * @param transformer a function returning the transformation
3994 * for an element
3995 * @param basis the identity (initial default value) for the reduction
3996 * @param reducer a commutative associative combining function
3997 * @return the result of accumulating the given transformation
3998 * of all (key, value) pairs
3999 */
4000 public double reduceToDoubleInParallel
4001 (DoubleBiFunction<? super K, ? super V> transformer,
4002 double basis,
4003 DoubleBinaryOperator reducer) {
4004 return ForkJoinTasks.reduceToDouble
4005 (this, transformer, basis, reducer).invoke();
4006 }
4007
4008 /**
4009 * Returns the result of accumulating the given transformation
4010 * of all (key, value) pairs using the given reducer to
4011 * combine values, and the given basis as an identity value.
4012 *
4013 * @param transformer a function returning the transformation
4014 * for an element
4015 * @param basis the identity (initial default value) for the reduction
4016 * @param reducer a commutative associative combining function
4017 * @return the result of accumulating the given transformation
4018 * of all (key, value) pairs
4019 */
4020 public long reduceToLongInParallel
4021 (LongBiFunction<? super K, ? super V> transformer,
4022 long basis,
4023 LongBinaryOperator reducer) {
4024 return ForkJoinTasks.reduceToLong
4025 (this, transformer, basis, reducer).invoke();
4026 }
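// Usage sketch (exposition only; "counts" hypothetical): parallel sum of all
// values; the reducer must be commutative and associative because partial
// results are combined in an unspecified order:
//
//   long total = counts.reduceToLongInParallel((k, v) -> v, 0L, (a, b) -> a + b);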
4027
4028 /**
4029 * Returns the result of accumulating the given transformation
4030 * of all (key, value) pairs using the given reducer to
4031 * combine values, and the given basis as an identity value.
4032 *
4033 * @param transformer a function returning the transformation
4034 * for an element
4035 * @param basis the identity (initial default value) for the reduction
4036 * @param reducer a commutative associative combining function
4037 * @return the result of accumulating the given transformation
4038 * of all (key, value) pairs
4039 */
4040 public int reduceToIntInParallel
4041 (IntBiFunction<? super K, ? super V> transformer,
4042 int basis,
4043 IntBinaryOperator reducer) {
4044 return ForkJoinTasks.reduceToInt
4045 (this, transformer, basis, reducer).invoke();
4046 }
4047
4048 /**
4049 * Performs the given action for each key.
4050 *
4051 * @param action the action
4052 */
4053 public void forEachKeyInParallel(Block<? super K> action) {
4054 ForkJoinTasks.forEachKey
4055 (this, action).invoke();
4056 }
4057
4058 /**
4059 * Performs the given action for each non-null transformation
4060 * of each key.
4061 *
4062 * @param transformer a function returning the transformation
4063 * for an element, or null if there is no transformation (in
4064 * which case the action is not applied).
4065 * @param action the action
4066 */
4067 public <U> void forEachKeyInParallel
4068 (Function<? super K, ? extends U> transformer,
4069 Block<? super U> action) {
4070 ForkJoinTasks.forEachKey
4071 (this, transformer, action).invoke();
4072 }
4073
4074 /**
4075 * Returns a non-null result from applying the given search
4076 * function on each key, or null if none. Upon success,
4077 * further element processing is suppressed and the results of
4078 * any other parallel invocations of the search function are
4079 * ignored.
4080 *
4081 * @param searchFunction a function returning a non-null
4082 * result on success, else null
4083 * @return a non-null result from applying the given search
4084 * function on each key, or null if none
4085 */
4086 public <U> U searchKeysInParallel
4087 (Function<? super K, ? extends U> searchFunction) {
4088 return ForkJoinTasks.searchKeys
4089 (this, searchFunction).invoke();
4090 }
4091
4092 /**
4093 * Returns the result of accumulating all keys using the given
4094 * reducer to combine values, or null if none.
4095 *
4096 * @param reducer a commutative associative combining function
4097 * @return the result of accumulating all keys using the given
4098 * reducer to combine values, or null if none
4099 */
4100 public K reduceKeysInParallel
4101 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4102 return ForkJoinTasks.reduceKeys
4103 (this, reducer).invoke();
4104 }
4105
4106 /**
4107 * Returns the result of accumulating the given transformation
4108 * of all keys using the given reducer to combine values, or
4109 * null if none.
4110 *
4111 * @param transformer a function returning the transformation
4112 * for an element, or null if there is no transformation (in
4113 * which case it is not combined).
4114 * @param reducer a commutative associative combining function
4115 * @return the result of accumulating the given transformation
4116 * of all keys
4117 */
4118 public <U> U reduceKeysInParallel
4119 (Function<? super K, ? extends U> transformer,
4120 BiFunction<? super U, ? super U, ? extends U> reducer) {
4121 return ForkJoinTasks.reduceKeys
4122 (this, transformer, reducer).invoke();
4123 }
4124
4125 /**
4126 * Returns the result of accumulating the given transformation
4127 * of all keys using the given reducer to combine values, and
4128 * the given basis as an identity value.
4129 *
4130 * @param transformer a function returning the transformation
4131 * for an element
4132 * @param basis the identity (initial default value) for the reduction
4133 * @param reducer a commutative associative combining function
4134 * @return the result of accumulating the given transformation
4135 * of all keys
4136 */
4137 public double reduceKeysToDoubleInParallel
4138 (DoubleFunction<? super K> transformer,
4139 double basis,
4140 DoubleBinaryOperator reducer) {
4141 return ForkJoinTasks.reduceKeysToDouble
4142 (this, transformer, basis, reducer).invoke();
4143 }
4144
4145 /**
4146 * Returns the result of accumulating the given transformation
4147 * of all keys using the given reducer to combine values, and
4148 * the given basis as an identity value.
4149 *
4150 * @param transformer a function returning the transformation
4151 * for an element
4152 * @param basis the identity (initial default value) for the reduction
4153 * @param reducer a commutative associative combining function
4154 * @return the result of accumulating the given transformation
4155 * of all keys
4156 */
4157 public long reduceKeysToLongInParallel
4158 (LongFunction<? super K> transformer,
4159 long basis,
4160 LongBinaryOperator reducer) {
4161 return ForkJoinTasks.reduceKeysToLong
4162 (this, transformer, basis, reducer).invoke();
4163 }
4164
4165 /**
4166 * Returns the result of accumulating the given transformation
4167 * of all keys using the given reducer to combine values, and
4168 * the given basis as an identity value.
4169 *
4170 * @param transformer a function returning the transformation
4171 * for an element
4172 * @param basis the identity (initial default value) for the reduction
4173 * @param reducer a commutative associative combining function
4174 * @return the result of accumulating the given transformation
4175 * of all keys
4176 */
4177 public int reduceKeysToIntInParallel
4178 (IntFunction<? super K> transformer,
4179 int basis,
4180 IntBinaryOperator reducer) {
4181 return ForkJoinTasks.reduceKeysToInt
4182 (this, transformer, basis, reducer).invoke();
4183 }
4184
4185 /**
4186 * Performs the given action for each value.
4187 *
4188 * @param action the action
4189 */
4190 public void forEachValueInParallel(Block<? super V> action) {
4191 ForkJoinTasks.forEachValue
4192 (this, action).invoke();
4193 }
4194
4195 /**
4196 * Performs the given action for each non-null transformation
4197 * of each value.
4198 *
4199 * @param transformer a function returning the transformation
4200 * for an element, or null if there is no transformation (in
4201 * which case the action is not applied).
* @param action the action
4202 */
4203 public <U> void forEachValueInParallel
4204 (Function<? super V, ? extends U> transformer,
4205 Block<? super U> action) {
4206 ForkJoinTasks.forEachValue
4207 (this, transformer, action).invoke();
4208 }
4209
4210 /**
4211 * Returns a non-null result from applying the given search
4212 * function on each value, or null if none. Upon success,
4213 * further element processing is suppressed and the results of
4214 * any other parallel invocations of the search function are
4215 * ignored.
4216 *
4217 * @param searchFunction a function returning a non-null
4218 * result on success, else null
4219 * @return a non-null result from applying the given search
4220 * function on each value, or null if none
4221 */
4222 public <U> U searchValuesInParallel
4223 (Function<? super V, ? extends U> searchFunction) {
4224 return ForkJoinTasks.searchValues
4225 (this, searchFunction).invoke();
4226 }
4227
4228 /**
4229 * Returns the result of accumulating all values using the
4230 * given reducer to combine values, or null if none.
4231 *
4232 * @param reducer a commutative associative combining function
4233 * @return the result of accumulating all values
4234 */
4235 public V reduceValuesInParallel
4236 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4237 return ForkJoinTasks.reduceValues
4238 (this, reducer).invoke();
4239 }
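// Usage sketch (exposition only; "counts" hypothetical): parallel reduction
// of all values to their maximum, or null for an empty map:
//
//   Long max = counts.reduceValuesInParallel((a, b) -> a >= b ? a : b);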
4240
4241 /**
4242 * Returns the result of accumulating the given transformation
4243 * of all values using the given reducer to combine values, or
4244 * null if none.
4245 *
4246 * @param transformer a function returning the transformation
4247 * for an element, or null if there is no transformation (in
4248 * which case it is not combined).
4249 * @param reducer a commutative associative combining function
4250 * @return the result of accumulating the given transformation
4251 * of all values
4252 */
4253 public <U> U reduceValuesInParallel
4254 (Function<? super V, ? extends U> transformer,
4255 BiFunction<? super U, ? super U, ? extends U> reducer) {
4256 return ForkJoinTasks.reduceValues
4257 (this, transformer, reducer).invoke();
4258 }
4259
4260 /**
4261 * Returns the result of accumulating the given transformation
4262 * of all values using the given reducer to combine values,
4263 * and the given basis as an identity value.
4264 *
4265 * @param transformer a function returning the transformation
4266 * for an element
4267 * @param basis the identity (initial default value) for the reduction
4268 * @param reducer a commutative associative combining function
4269 * @return the result of accumulating the given transformation
4270 * of all values
4271 */
4272 public double reduceValuesToDoubleInParallel
4273 (DoubleFunction<? super V> transformer,
4274 double basis,
4275 DoubleBinaryOperator reducer) {
4276 return ForkJoinTasks.reduceValuesToDouble
4277 (this, transformer, basis, reducer).invoke();
4278 }
4279
4280 /**
4281 * Returns the result of accumulating the given transformation
4282 * of all values using the given reducer to combine values,
4283 * and the given basis as an identity value.
4284 *
4285 * @param transformer a function returning the transformation
4286 * for an element
4287 * @param basis the identity (initial default value) for the reduction
4288 * @param reducer a commutative associative combining function
4289 * @return the result of accumulating the given transformation
4290 * of all values
4291 */
4292 public long reduceValuesToLongInParallel
4293 (LongFunction<? super V> transformer,
4294 long basis,
4295 LongBinaryOperator reducer) {
4296 return ForkJoinTasks.reduceValuesToLong
4297 (this, transformer, basis, reducer).invoke();
4298 }
4299
4300 /**
4301 * Returns the result of accumulating the given transformation
4302 * of all values using the given reducer to combine values,
4303 * and the given basis as an identity value.
4304 *
4305 * @param transformer a function returning the transformation
4306 * for an element
4307 * @param basis the identity (initial default value) for the reduction
4308 * @param reducer a commutative associative combining function
4309 * @return the result of accumulating the given transformation
4310 * of all values
4311 */
4312 public int reduceValuesToIntInParallel
4313 (IntFunction<? super V> transformer,
4314 int basis,
4315 IntBinaryOperator reducer) {
4316 return ForkJoinTasks.reduceValuesToInt
4317 (this, transformer, basis, reducer).invoke();
4318 }
4319
4320 /**
4321 * Performs the given action for each entry.
4322 *
4323 * @param action the action
4324 */
4325 public void forEachEntryInParallel(Block<? super Map.Entry<K,V>> action) {
4326 ForkJoinTasks.forEachEntry
4327 (this, action).invoke();
4328 }
4329
4330 /**
4331 * Performs the given action for each non-null transformation
4332 * of each entry.
4333 *
4334 * @param transformer a function returning the transformation
4335 * for an element, or null if there is no transformation (in
4336 * which case the action is not applied).
4337 * @param action the action
4338 */
4339 public <U> void forEachEntryInParallel
4340 (Function<Map.Entry<K,V>, ? extends U> transformer,
4341 Block<? super U> action) {
4342 ForkJoinTasks.forEachEntry
4343 (this, transformer, action).invoke();
4344 }
4345
4346 /**
4347 * Returns a non-null result from applying the given search
4348 * function on each entry, or null if none. Upon success,
4349 * further element processing is suppressed and the results of
4350 * any other parallel invocations of the search function are
4351 * ignored.
4352 *
4353 * @param searchFunction a function returning a non-null
4354 * result on success, else null
4355 * @return a non-null result from applying the given search
4356 * function on each entry, or null if none
4357 */
4358 public <U> U searchEntriesInParallel
4359 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4360 return ForkJoinTasks.searchEntries
4361 (this, searchFunction).invoke();
4362 }
4363
4364 /**
4365 * Returns the result of accumulating all entries using the
4366 * given reducer to combine values, or null if none.
4367 *
4368 * @param reducer a commutative associative combining function
4369 * @return the result of accumulating all entries
4370 */
4371 public Map.Entry<K,V> reduceEntriesInParallel
4372 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4373 return ForkJoinTasks.reduceEntries
4374 (this, reducer).invoke();
4375 }
4376
4377 /**
4378 * Returns the result of accumulating the given transformation
4379 * of all entries using the given reducer to combine values,
4380 * or null if none.
4381 *
4382 * @param transformer a function returning the transformation
4383 * for an element, or null if there is no transformation (in
4384 * which case it is not combined).
4385 * @param reducer a commutative associative combining function
4386 * @return the result of accumulating the given transformation
4387 * of all entries
4388 */
4389 public <U> U reduceEntriesInParallel
4390 (Function<Map.Entry<K,V>, ? extends U> transformer,
4391 BiFunction<? super U, ? super U, ? extends U> reducer) {
4392 return ForkJoinTasks.reduceEntries
4393 (this, transformer, reducer).invoke();
4394 }
4395
4396 /**
4397 * Returns the result of accumulating the given transformation
4398 * of all entries using the given reducer to combine values,
4399 * and the given basis as an identity value.
4400 *
4401 * @param transformer a function returning the transformation
4402 * for an element
4403 * @param basis the identity (initial default value) for the reduction
4404 * @param reducer a commutative associative combining function
4405 * @return the result of accumulating the given transformation
4406 * of all entries
4407 */
4408 public double reduceEntriesToDoubleInParallel
4409 (DoubleFunction<Map.Entry<K,V>> transformer,
4410 double basis,
4411 DoubleBinaryOperator reducer) {
4412 return ForkJoinTasks.reduceEntriesToDouble
4413 (this, transformer, basis, reducer).invoke();
4414 }
4415
4416 /**
4417 * Returns the result of accumulating the given transformation
4418 * of all entries using the given reducer to combine values,
4419 * and the given basis as an identity value.
4420 *
4421 * @param transformer a function returning the transformation
4422 * for an element
4423 * @param basis the identity (initial default value) for the reduction
4424 * @param reducer a commutative associative combining function
4425 * @return the result of accumulating the given transformation
4426 * of all entries
4427 */
4428 public long reduceEntriesToLongInParallel
4429 (LongFunction<Map.Entry<K,V>> transformer,
4430 long basis,
4431 LongBinaryOperator reducer) {
4432 return ForkJoinTasks.reduceEntriesToLong
4433 (this, transformer, basis, reducer).invoke();
4434 }
4435
4436 /**
4437 * Returns the result of accumulating the given transformation
4438 * of all entries using the given reducer to combine values,
4439 * and the given basis as an identity value.
4440 *
4441 * @param transformer a function returning the transformation
4442 * for an element
4443 * @param basis the identity (initial default value) for the reduction
4444 * @param reducer a commutative associative combining function
4445 * @return the result of accumulating the given transformation
4446 * of all entries
4447 */
4448 public int reduceEntriesToIntInParallel
4449 (IntFunction<Map.Entry<K,V>> transformer,
4450 int basis,
4451 IntBinaryOperator reducer) {
4452 return ForkJoinTasks.reduceEntriesToInt
4453 (this, transformer, basis, reducer).invoke();
4454 }
4455
4456
4457 /* ----------------Views -------------- */
4458
4459 /**
4460 * Base class for views.
4461 */
4462 abstract static class CHMView<K, V> {
4463 final ConcurrentHashMap<K, V> map;
4464 CHMView(ConcurrentHashMap<K, V> map) { this.map = map; }
4465
4466 /**
4467 * Returns the map backing this view.
4468 *
4469 * @return the map backing this view
4470 */
4471 public ConcurrentHashMap<K,V> getMap() { return map; }
4472
4473 public final int size() { return map.size(); }
4474 public final boolean isEmpty() { return map.isEmpty(); }
4475 public final void clear() { map.clear(); }
4476
4477 // implementations below rely on concrete classes supplying these
4478 public abstract Iterator<?> iterator();
4479 public abstract boolean contains(Object o);
4480 public abstract boolean remove(Object o);
4481
4482 private static final String oomeMsg = "Required array size too large";
4483
4484 public final Object[] toArray() {
4485 long sz = map.mappingCount();
4486 if (sz > (long)(MAX_ARRAY_SIZE))
4487 throw new OutOfMemoryError(oomeMsg);
4488 int n = (int)sz;
4489 Object[] r = new Object[n];
4490 int i = 0;
4491 Iterator<?> it = iterator();
4492 while (it.hasNext()) {
4493 if (i == n) {
4494 if (n >= MAX_ARRAY_SIZE)
4495 throw new OutOfMemoryError(oomeMsg);
4496 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4497 n = MAX_ARRAY_SIZE;
4498 else
4499 n += (n >>> 1) + 1;
4500 r = Arrays.copyOf(r, n);
4501 }
4502 r[i++] = it.next();
4503 }
4504 return (i == n) ? r : Arrays.copyOf(r, i);
4505 }
4506
4507 @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
4508 long sz = map.mappingCount();
4509 if (sz > (long)(MAX_ARRAY_SIZE))
4510 throw new OutOfMemoryError(oomeMsg);
4511 int m = (int)sz;
4512 T[] r = (a.length >= m) ? a :
4513 (T[])java.lang.reflect.Array
4514 .newInstance(a.getClass().getComponentType(), m);
4515 int n = r.length;
4516 int i = 0;
4517 Iterator<?> it = iterator();
4518 while (it.hasNext()) {
4519 if (i == n) {
4520 if (n >= MAX_ARRAY_SIZE)
4521 throw new OutOfMemoryError(oomeMsg);
4522 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4523 n = MAX_ARRAY_SIZE;
4524 else
4525 n += (n >>> 1) + 1;
4526 r = Arrays.copyOf(r, n);
4527 }
4528 r[i++] = (T)it.next();
4529 }
4530 if (a == r && i < n) {
4531 r[i] = null; // null-terminate
4532 return r;
4533 }
4534 return (i == n) ? r : Arrays.copyOf(r, i);
4535 }
4536
4537 public final int hashCode() {
4538 int h = 0;
4539 for (Iterator<?> it = iterator(); it.hasNext();)
4540 h += it.next().hashCode();
4541 return h;
4542 }
4543
4544 public final String toString() {
4545 StringBuilder sb = new StringBuilder();
4546 sb.append('[');
4547 Iterator<?> it = iterator();
4548 if (it.hasNext()) {
4549 for (;;) {
4550 Object e = it.next();
4551 sb.append(e == this ? "(this Collection)" : e);
4552 if (!it.hasNext())
4553 break;
4554 sb.append(',').append(' ');
4555 }
4556 }
4557 return sb.append(']').toString();
4558 }
4559
4560 public final boolean containsAll(Collection<?> c) {
4561 if (c != this) {
4562 for (Iterator<?> it = c.iterator(); it.hasNext();) {
4563 Object e = it.next();
4564 if (e == null || !contains(e))
4565 return false;
4566 }
4567 }
4568 return true;
4569 }
4570
4571 public final boolean removeAll(Collection<?> c) {
4572 boolean modified = false;
4573 for (Iterator<?> it = iterator(); it.hasNext();) {
4574 if (c.contains(it.next())) {
4575 it.remove();
4576 modified = true;
4577 }
4578 }
4579 return modified;
4580 }
4581
4582 public final boolean retainAll(Collection<?> c) {
4583 boolean modified = false;
4584 for (Iterator<?> it = iterator(); it.hasNext();) {
4585 if (!c.contains(it.next())) {
4586 it.remove();
4587 modified = true;
4588 }
4589 }
4590 return modified;
4591 }
4592
4593 }
4594
4595 /**
4596 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4597 * which additions may optionally be enabled by mapping to a
4598 * common value. This class cannot be directly instantiated. See
4599 * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
4600 * {@link #newKeySet(int)}.
4601 */
4602 public static class KeySetView<K,V> extends CHMView<K,V>
4603 implements Set<K>, java.io.Serializable {
4604 private static final long serialVersionUID = 7249069246763182397L;
4605 private final V value;
4606 KeySetView(ConcurrentHashMap<K, V> map, V value) { // non-public
4607 super(map);
4608 this.value = value;
4609 }
4610
4611 /**
4612 * Returns the default mapped value for additions,
4613 * or {@code null} if additions are not supported.
4614 *
4615 * @return the default mapped value for additions, or {@code null}
4616 * if not supported.
4617 */
4618 public V getMappedValue() { return value; }
4619
4620 // implement Set API
4621
4622 public boolean contains(Object o) { return map.containsKey(o); }
4623 public boolean remove(Object o) { return map.remove(o) != null; }
4624
4625 /**
4626 * Returns a "weakly consistent" iterator that will never
4627 * throw {@link ConcurrentModificationException}, and
4628 * guarantees to traverse elements as they existed upon
4629 * construction of the iterator, and may (but is not
4630 * guaranteed to) reflect any modifications subsequent to
4631 * construction.
4632 *
4633 * @return an iterator over the keys of this map
4634 */
4635 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4636 public boolean add(K e) {
4637 V v;
4638 if ((v = value) == null)
4639 throw new UnsupportedOperationException();
4640 if (e == null)
4641 throw new NullPointerException();
4642 return map.internalPut(e, v, true) == null;
4643 }
4644 public boolean addAll(Collection<? extends K> c) {
4645 boolean added = false;
4646 V v;
4647 if ((v = value) == null)
4648 throw new UnsupportedOperationException();
4649 for (K e : c) {
4650 if (e == null)
4651 throw new NullPointerException();
4652 if (map.internalPut(e, v, true) == null)
4653 added = true;
4654 }
4655 return added;
4656 }
4657 public boolean equals(Object o) {
4658 Set<?> c;
4659 return ((o instanceof Set) &&
4660 ((c = (Set<?>)o) == this ||
4661 (containsAll(c) && c.containsAll(this))));
4662 }
4663
4664 public Stream<K> stream() {
4665 return Streams.stream(() -> new KeyIterator<K,V>(map), 0);
4666 }
4667 public Stream<K> parallelStream() {
4668 return Streams.parallelStream(() -> new KeyIterator<K,V>(map, null),
4669 0);
4670 }
4671 }
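// Usage sketch (exposition only; names hypothetical): a KeySetView obtained
// from keySet(mappedValue) supports add/addAll by binding new keys to that
// value, so the map can be used as a concurrent set:
//
//   ConcurrentHashMap<String,Boolean> m = new ConcurrentHashMap<>();
//   Set<String> seen = m.keySet(Boolean.TRUE);  // additions enabled
//   seen.add("first-visit");                    // maps "first-visit" to TRUE if absent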
4672
4673 /**
4674 * A view of a ConcurrentHashMap as a {@link Collection} of
4675 * values, in which additions are disabled. This class cannot be
4676 * directly instantiated. See {@link #values}.
4677 *
4678 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4679 * that will never throw {@link ConcurrentModificationException},
4680 * and guarantees to traverse elements as they existed upon
4681 * construction of the iterator, and may (but is not guaranteed to)
4682 * reflect any modifications subsequent to construction.
4683 */
4684 public static final class ValuesView<K,V> extends CHMView<K,V>
4685 implements Collection<V> {
4686 ValuesView(ConcurrentHashMap<K, V> map) { super(map); }
4687 public final boolean contains(Object o) { return map.containsValue(o); }
4688 public final boolean remove(Object o) {
4689 if (o != null) {
4690 Iterator<V> it = new ValueIterator<K,V>(map);
4691 while (it.hasNext()) {
4692 if (o.equals(it.next())) {
4693 it.remove();
4694 return true;
4695 }
4696 }
4697 }
4698 return false;
4699 }
4700
4701 /**
4702 * Returns a "weakly consistent" iterator that will never
4703 * throw {@link ConcurrentModificationException}, and
4704 * guarantees to traverse elements as they existed upon
4705 * construction of the iterator, and may (but is not
4706 * guaranteed to) reflect any modifications subsequent to
4707 * construction.
4708 *
4709 * @return an iterator over the values of this map
4710 */
4711 public final Iterator<V> iterator() {
4712 return new ValueIterator<K,V>(map);
4713 }
4714 public final boolean add(V e) {
4715 throw new UnsupportedOperationException();
4716 }
4717 public final boolean addAll(Collection<? extends V> c) {
4718 throw new UnsupportedOperationException();
4719 }
4720
4721 public Stream<V> stream() {
4722 return Streams.stream(() -> new ValueIterator<K,V>(map), 0);
4723 }
4724
4725 public Stream<V> parallelStream() {
4726 return Streams.parallelStream(() -> new ValueIterator<K,V>(map, null),
4727 0);
4728 }
4729
4730 }
4731
4732 /**
4733 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4734 * entries. This class cannot be directly instantiated. See
4735 * {@link #entrySet}.
4736 */
4737 public static final class EntrySetView<K,V> extends CHMView<K,V>
4738 implements Set<Map.Entry<K,V>> {
4739 EntrySetView(ConcurrentHashMap<K, V> map) { super(map); }
4740 public final boolean contains(Object o) {
4741 Object k, v, r; Map.Entry<?,?> e;
4742 return ((o instanceof Map.Entry) &&
4743 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4744 (r = map.get(k)) != null &&
4745 (v = e.getValue()) != null &&
4746 (v == r || v.equals(r)));
4747 }
4748 public final boolean remove(Object o) {
4749 Object k, v; Map.Entry<?,?> e;
4750 return ((o instanceof Map.Entry) &&
4751 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4752 (v = e.getValue()) != null &&
4753 map.remove(k, v));
4754 }
4755
4756 /**
4757 * Returns a "weakly consistent" iterator that will never
4758 * throw {@link ConcurrentModificationException}, and
4759 * guarantees to traverse elements as they existed upon
4760 * construction of the iterator, and may (but is not
4761 * guaranteed to) reflect any modifications subsequent to
4762 * construction.
4763 *
4764 * @return an iterator over the entries of this map
4765 */
4766 public final Iterator<Map.Entry<K,V>> iterator() {
4767 return new EntryIterator<K,V>(map);
4768 }
4769
4770 public final boolean add(Entry<K,V> e) {
4771 K key = e.getKey();
4772 V value = e.getValue();
4773 if (key == null || value == null)
4774 throw new NullPointerException();
4775 return map.internalPut(key, value, false) == null;
4776 }
4777 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4778 boolean added = false;
4779 for (Entry<K,V> e : c) {
4780 if (add(e))
4781 added = true;
4782 }
4783 return added;
4784 }
4785 public boolean equals(Object o) {
4786 Set<?> c;
4787 return ((o instanceof Set) &&
4788 ((c = (Set<?>)o) == this ||
4789 (containsAll(c) && c.containsAll(this))));
4790 }
4791
4792 public Stream<Map.Entry<K,V>> stream() {
4793 return Streams.stream(() -> new EntryIterator<K,V>(map), 0);
4794 }
4795
4796 public Stream<Map.Entry<K,V>> parallelStream() {
4797 return Streams.parallelStream(() -> new EntryIterator<K,V>(map, null),
4798 0);
4799 }
4800 }
4801
4802 // ---------------------------------------------------------------------
4803
4804 /**
4805 * Predefined tasks for performing bulk parallel operations on
4806 * ConcurrentHashMaps. These tasks follow the forms and rules used
4807 * for bulk operations. Each method has the same name, but returns
4808 * a task rather than invoking it. These methods may be useful in
4809 * custom applications such as submitting a task without waiting
4810 * for completion, using a custom pool, or combining with other
4811 * tasks.
4812 */
4813 public static class ForkJoinTasks {
4814 private ForkJoinTasks() {}
4815
4816 /**
4817 * Returns a task that when invoked, performs the given
4818 * action for each (key, value).
4819 *
4820 * @param map the map
4821 * @param action the action
4822 * @return the task
4823 */
4824 public static <K,V> ForkJoinTask<Void> forEach
4825 (ConcurrentHashMap<K,V> map,
4826 BiBlock<? super K, ? super V> action) {
4827 if (action == null) throw new NullPointerException();
4828 return new ForEachMappingTask<K,V>(map, null, -1, action);
4829 }
4830
4831 /**
4832 * Returns a task that when invoked, performs the given
4833 * action for each non-null transformation of each (key, value).
4834 *
4835 * @param map the map
4836 * @param transformer a function returning the transformation
4837 * for an element, or null if there is no transformation (in
4838 * which case the action is not applied)
4839 * @param action the action
4840 * @return the task
4841 */
4842 public static <K,V,U> ForkJoinTask<Void> forEach
4843 (ConcurrentHashMap<K,V> map,
4844 BiFunction<? super K, ? super V, ? extends U> transformer,
4845 Block<? super U> action) {
4846 if (transformer == null || action == null)
4847 throw new NullPointerException();
4848 return new ForEachTransformedMappingTask<K,V,U>
4849 (map, null, -1, transformer, action);
4850 }
4851
4852 /**
4853 * Returns a task that when invoked, returns a non-null result
4854 * from applying the given search function on each (key,
4855 * value), or null if none. Upon success, further element
4856 * processing is suppressed and the results of any other
4857 * parallel invocations of the search function are ignored.
4858 *
4859 * @param map the map
4860 * @param searchFunction a function returning a non-null
4861 * result on success, else null
4862 * @return the task
4863 */
4864 public static <K,V,U> ForkJoinTask<U> search
4865 (ConcurrentHashMap<K,V> map,
4866 BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4867 if (searchFunction == null) throw new NullPointerException();
4868 return new SearchMappingsTask<K,V,U>
4869 (map, null, -1, searchFunction,
4870 new AtomicReference<U>());
4871 }
4872
4873 /**
4874 * Returns a task that when invoked, returns the result of
4875 * accumulating the given transformation of all (key, value) pairs
4876 * using the given reducer to combine values, or null if none.
4877 *
4878 * @param map the map
4879 * @param transformer a function returning the transformation
4880 * for an element, or null if there is no transformation (in
4881 * which case it is not combined).
4882 * @param reducer a commutative associative combining function
4883 * @return the task
4884 */
4885 public static <K,V,U> ForkJoinTask<U> reduce
4886 (ConcurrentHashMap<K,V> map,
4887 BiFunction<? super K, ? super V, ? extends U> transformer,
4888 BiFunction<? super U, ? super U, ? extends U> reducer) {
4889 if (transformer == null || reducer == null)
4890 throw new NullPointerException();
4891 return new MapReduceMappingsTask<K,V,U>
4892 (map, null, -1, null, transformer, reducer);
4893 }
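// Usage sketch (exposition only; the pool, map, and value type are
// hypothetical): the task form lets a caller run the same reduction in a
// pool of its choosing, or combine it with other tasks, instead of invoking
// it immediately as reduceInParallel does:
//
//   ForkJoinPool pool = new ForkJoinPool();
//   ForkJoinTask<Integer> task = ConcurrentHashMap.ForkJoinTasks.reduce(
//       map,
//       (k, v) -> v.length(),      // transformer (assumes String values)
//       (a, b) -> a + b);          // reducer
//   int totalLength = pool.invoke(task);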
4894
4895 /**
4896 * Returns a task that when invoked, returns the result of
4897 * accumulating the given transformation of all (key, value) pairs
4898 * using the given reducer to combine values, and the given
4899 * basis as an identity value.
4900 *
4901 * @param map the map
4902 * @param transformer a function returning the transformation
4903 * for an element
4904 * @param basis the identity (initial default value) for the reduction
4905 * @param reducer a commutative associative combining function
4906 * @return the task
4907 */
4908 public static <K,V> ForkJoinTask<Double> reduceToDouble
4909 (ConcurrentHashMap<K,V> map,
4910 DoubleBiFunction<? super K, ? super V> transformer,
4911 double basis,
4912 DoubleBinaryOperator reducer) {
4913 if (transformer == null || reducer == null)
4914 throw new NullPointerException();
4915 return new MapReduceMappingsToDoubleTask<K,V>
4916 (map, null, -1, null, transformer, basis, reducer);
4917 }
4918
4919 /**
4920 * Returns a task that when invoked, returns the result of
4921 * accumulating the given transformation of all (key, value) pairs
4922 * using the given reducer to combine values, and the given
4923 * basis as an identity value.
4924 *
4925 * @param map the map
4926 * @param transformer a function returning the transformation
4927 * for an element
4928 * @param basis the identity (initial default value) for the reduction
4929 * @param reducer a commutative associative combining function
4930 * @return the task
4931 */
4932 public static <K,V> ForkJoinTask<Long> reduceToLong
4933 (ConcurrentHashMap<K,V> map,
4934 LongBiFunction<? super K, ? super V> transformer,
4935 long basis,
4936 LongBinaryOperator reducer) {
4937 if (transformer == null || reducer == null)
4938 throw new NullPointerException();
4939 return new MapReduceMappingsToLongTask<K,V>
4940 (map, null, -1, null, transformer, basis, reducer);
4941 }
4942
4943 /**
4944 * Returns a task that when invoked, returns the result of
4945 * accumulating the given transformation of all (key, value) pairs
4946 * using the given reducer to combine values, and the given
4947 * basis as an identity value.
4948      *
          * @param map the map
4949      * @param transformer a function returning the transformation
4950 * for an element
4951 * @param basis the identity (initial default value) for the reduction
4952 * @param reducer a commutative associative combining function
4953 * @return the task
4954 */
4955 public static <K,V> ForkJoinTask<Integer> reduceToInt
4956 (ConcurrentHashMap<K,V> map,
4957 IntBiFunction<? super K, ? super V> transformer,
4958 int basis,
4959 IntBinaryOperator reducer) {
4960 if (transformer == null || reducer == null)
4961 throw new NullPointerException();
4962 return new MapReduceMappingsToIntTask<K,V>
4963 (map, null, -1, null, transformer, basis, reducer);
4964 }
4965
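        /*
         * Illustrative sketch (editorial addition): counting mappings whose
         * value exceeds 100, using the basis 0 as the identity and integer
         * addition as the reducer ("map" and "pool" are hypothetical).
         *
         *   ForkJoinTask<Integer> task = ConcurrentHashMap.ForkJoinTasks.reduceToInt
         *       (map, (k, v) -> v > 100 ? 1 : 0, 0, (x, y) -> x + y);
         *   int count = pool.invoke(task);
         */
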
4966 /**
4967 * Returns a task that when invoked, performs the given action
4968 * for each key.
4969 *
4970 * @param map the map
4971 * @param action the action
4972 * @return the task
4973 */
4974 public static <K,V> ForkJoinTask<Void> forEachKey
4975 (ConcurrentHashMap<K,V> map,
4976 Block<? super K> action) {
4977 if (action == null) throw new NullPointerException();
4978 return new ForEachKeyTask<K,V>(map, null, -1, action);
4979 }
4980
4981 /**
4982 * Returns a task that when invoked, performs the given action
4983 * for each non-null transformation of each key.
4984 *
4985 * @param map the map
4986 * @param transformer a function returning the transformation
4987 * for an element, or null if there is no transformation (in
4988 * which case the action is not applied)
4989 * @param action the action
4990 * @return the task
4991 */
4992 public static <K,V,U> ForkJoinTask<Void> forEachKey
4993 (ConcurrentHashMap<K,V> map,
4994 Function<? super K, ? extends U> transformer,
4995 Block<? super U> action) {
4996 if (transformer == null || action == null)
4997 throw new NullPointerException();
4998 return new ForEachTransformedKeyTask<K,V,U>
4999 (map, null, -1, transformer, action);
5000 }
5001
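        /*
         * Illustrative sketch (editorial addition): printing an upper-cased
         * form of each key of a hypothetical ConcurrentHashMap<String,Integer>
         * "map"; keys whose transformation is null would be skipped.
         *
         *   ForkJoinTask<Void> task = ConcurrentHashMap.ForkJoinTasks.forEachKey
         *       (map, k -> k.toUpperCase(), u -> System.out.println(u));
         *   pool.invoke(task);
         */
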
5002 /**
5003 * Returns a task that when invoked, returns a non-null result
5004 * from applying the given search function on each key, or
5005 * null if none. Upon success, further element processing is
5006 * suppressed and the results of any other parallel
5007 * invocations of the search function are ignored.
5008 *
5009 * @param map the map
5010 * @param searchFunction a function returning a non-null
5011 * result on success, else null
5012 * @return the task
5013 */
5014 public static <K,V,U> ForkJoinTask<U> searchKeys
5015 (ConcurrentHashMap<K,V> map,
5016 Function<? super K, ? extends U> searchFunction) {
5017 if (searchFunction == null) throw new NullPointerException();
5018 return new SearchKeysTask<K,V,U>
5019 (map, null, -1, searchFunction,
5020 new AtomicReference<U>());
5021 }
5022
5023 /**
5024 * Returns a task that when invoked, returns the result of
5025 * accumulating all keys using the given reducer to combine
5026 * values, or null if none.
5027 *
5028 * @param map the map
5029 * @param reducer a commutative associative combining function
5030 * @return the task
5031 */
5032 public static <K,V> ForkJoinTask<K> reduceKeys
5033 (ConcurrentHashMap<K,V> map,
5034 BiFunction<? super K, ? super K, ? extends K> reducer) {
5035 if (reducer == null) throw new NullPointerException();
5036 return new ReduceKeysTask<K,V>
5037 (map, null, -1, null, reducer);
5038 }
5039
5040 /**
5041 * Returns a task that when invoked, returns the result of
5042 * accumulating the given transformation of all keys using the given
5043 * reducer to combine values, or null if none.
5044 *
5045 * @param map the map
5046 * @param transformer a function returning the transformation
5047 * for an element, or null if there is no transformation (in
5048 * which case it is not combined).
5049 * @param reducer a commutative associative combining function
5050 * @return the task
5051 */
5052 public static <K,V,U> ForkJoinTask<U> reduceKeys
5053 (ConcurrentHashMap<K,V> map,
5054 Function<? super K, ? extends U> transformer,
5055 BiFunction<? super U, ? super U, ? extends U> reducer) {
5056 if (transformer == null || reducer == null)
5057 throw new NullPointerException();
5058 return new MapReduceKeysTask<K,V,U>
5059 (map, null, -1, null, transformer, reducer);
5060 }
5061
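        /*
         * Illustrative sketch (editorial addition): total length of all keys,
         * obtained by transforming each key to its length and combining with
         * addition; the result is null for an empty map ("map" and "pool"
         * are hypothetical).
         *
         *   ForkJoinTask<Integer> task = ConcurrentHashMap.ForkJoinTasks.reduceKeys
         *       (map, k -> k.length(), (a, b) -> a + b);
         *   Integer totalKeyLength = pool.invoke(task);
         */
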
5062 /**
5063 * Returns a task that when invoked, returns the result of
5064 * accumulating the given transformation of all keys using the given
5065 * reducer to combine values, and the given basis as an
5066 * identity value.
5067 *
5068 * @param map the map
5069 * @param transformer a function returning the transformation
5070 * for an element
5071 * @param basis the identity (initial default value) for the reduction
5072 * @param reducer a commutative associative combining function
5073 * @return the task
5074 */
5075 public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5076 (ConcurrentHashMap<K,V> map,
5077 DoubleFunction<? super K> transformer,
5078 double basis,
5079 DoubleBinaryOperator reducer) {
5080 if (transformer == null || reducer == null)
5081 throw new NullPointerException();
5082 return new MapReduceKeysToDoubleTask<K,V>
5083 (map, null, -1, null, transformer, basis, reducer);
5084 }
5085
5086 /**
5087 * Returns a task that when invoked, returns the result of
5088 * accumulating the given transformation of all keys using the given
5089 * reducer to combine values, and the given basis as an
5090 * identity value.
5091 *
5092 * @param map the map
5093 * @param transformer a function returning the transformation
5094 * for an element
5095 * @param basis the identity (initial default value) for the reduction
5096 * @param reducer a commutative associative combining function
5097 * @return the task
5098 */
5099 public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5100 (ConcurrentHashMap<K,V> map,
5101 LongFunction<? super K> transformer,
5102 long basis,
5103 LongBinaryOperator reducer) {
5104 if (transformer == null || reducer == null)
5105 throw new NullPointerException();
5106 return new MapReduceKeysToLongTask<K,V>
5107 (map, null, -1, null, transformer, basis, reducer);
5108 }
5109
5110 /**
5111 * Returns a task that when invoked, returns the result of
5112 * accumulating the given transformation of all keys using the given
5113 * reducer to combine values, and the given basis as an
5114 * identity value.
5115 *
5116 * @param map the map
5117 * @param transformer a function returning the transformation
5118 * for an element
5119 * @param basis the identity (initial default value) for the reduction
5120 * @param reducer a commutative associative combining function
5121 * @return the task
5122 */
5123 public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5124 (ConcurrentHashMap<K,V> map,
5125 IntFunction<? super K> transformer,
5126 int basis,
5127 IntBinaryOperator reducer) {
5128 if (transformer == null || reducer == null)
5129 throw new NullPointerException();
5130 return new MapReduceKeysToIntTask<K,V>
5131 (map, null, -1, null, transformer, basis, reducer);
5132 }
5133
5134 /**
5135 * Returns a task that when invoked, performs the given action
5136 * for each value.
5137 *
5138 * @param map the map
5139 * @param action the action
          * @return the task
5140      */
5141 public static <K,V> ForkJoinTask<Void> forEachValue
5142 (ConcurrentHashMap<K,V> map,
5143 Block<? super V> action) {
5144 if (action == null) throw new NullPointerException();
5145 return new ForEachValueTask<K,V>(map, null, -1, action);
5146 }
5147
5148 /**
5149 * Returns a task that when invoked, performs the given action
5150 * for each non-null transformation of each value.
5151 *
5152 * @param map the map
5153 * @param transformer a function returning the transformation
5154 * for an element, or null if there is no transformation (in
5155 * which case the action is not applied)
5156 * @param action the action
          * @return the task
5157      */
5158 public static <K,V,U> ForkJoinTask<Void> forEachValue
5159 (ConcurrentHashMap<K,V> map,
5160 Function<? super V, ? extends U> transformer,
5161 Block<? super U> action) {
5162 if (transformer == null || action == null)
5163 throw new NullPointerException();
5164 return new ForEachTransformedValueTask<K,V,U>
5165 (map, null, -1, transformer, action);
5166 }
5167
5168 /**
5169 * Returns a task that when invoked, returns a non-null result
5170 * from applying the given search function on each value, or
5171 * null if none. Upon success, further element processing is
5172 * suppressed and the results of any other parallel
5173 * invocations of the search function are ignored.
5174 *
5175 * @param map the map
5176 * @param searchFunction a function returning a non-null
5177 * result on success, else null
5178 * @return the task
5179 */
5180 public static <K,V,U> ForkJoinTask<U> searchValues
5181 (ConcurrentHashMap<K,V> map,
5182 Function<? super V, ? extends U> searchFunction) {
5183 if (searchFunction == null) throw new NullPointerException();
5184 return new SearchValuesTask<K,V,U>
5185 (map, null, -1, searchFunction,
5186 new AtomicReference<U>());
5187 }
5188
5189 /**
5190 * Returns a task that when invoked, returns the result of
5191 * accumulating all values using the given reducer to combine
5192 * values, or null if none.
5193 *
5194 * @param map the map
5195 * @param reducer a commutative associative combining function
5196 * @return the task
5197 */
5198 public static <K,V> ForkJoinTask<V> reduceValues
5199 (ConcurrentHashMap<K,V> map,
5200 BiFunction<? super V, ? super V, ? extends V> reducer) {
5201 if (reducer == null) throw new NullPointerException();
5202 return new ReduceValuesTask<K,V>
5203 (map, null, -1, null, reducer);
5204 }
5205
5206 /**
5207 * Returns a task that when invoked, returns the result of
5208 * accumulating the given transformation of all values using the
5209 * given reducer to combine values, or null if none.
5210 *
5211 * @param map the map
5212 * @param transformer a function returning the transformation
5213 * for an element, or null if there is no transformation (in
5214 * which case it is not combined).
5215 * @param reducer a commutative associative combining function
5216 * @return the task
5217 */
5218 public static <K,V,U> ForkJoinTask<U> reduceValues
5219 (ConcurrentHashMap<K,V> map,
5220 Function<? super V, ? extends U> transformer,
5221 BiFunction<? super U, ? super U, ? extends U> reducer) {
5222 if (transformer == null || reducer == null)
5223 throw new NullPointerException();
5224 return new MapReduceValuesTask<K,V,U>
5225 (map, null, -1, null, transformer, reducer);
5226 }
5227
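        /*
         * Illustrative sketch (editorial addition): the largest value, using
         * an identity transform and Math.max as the (commutative,
         * associative) reducer ("map" and "pool" are hypothetical).
         *
         *   ForkJoinTask<Integer> task = ConcurrentHashMap.ForkJoinTasks.reduceValues
         *       (map, v -> v, (a, b) -> Math.max(a, b));
         *   Integer max = pool.invoke(task);  // null if the map is empty
         */
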
5228 /**
5229 * Returns a task that when invoked, returns the result of
5230 * accumulating the given transformation of all values using the
5231 * given reducer to combine values, and the given basis as an
5232 * identity value.
5233 *
5234 * @param map the map
5235 * @param transformer a function returning the transformation
5236 * for an element
5237 * @param basis the identity (initial default value) for the reduction
5238 * @param reducer a commutative associative combining function
5239 * @return the task
5240 */
5241 public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5242 (ConcurrentHashMap<K,V> map,
5243 DoubleFunction<? super V> transformer,
5244 double basis,
5245 DoubleBinaryOperator reducer) {
5246 if (transformer == null || reducer == null)
5247 throw new NullPointerException();
5248 return new MapReduceValuesToDoubleTask<K,V>
5249 (map, null, -1, null, transformer, basis, reducer);
5250 }
5251
5252 /**
5253 * Returns a task that when invoked, returns the result of
5254 * accumulating the given transformation of all values using the
5255 * given reducer to combine values, and the given basis as an
5256 * identity value.
5257 *
5258 * @param map the map
5259 * @param transformer a function returning the transformation
5260 * for an element
5261 * @param basis the identity (initial default value) for the reduction
5262 * @param reducer a commutative associative combining function
5263 * @return the task
5264 */
5265 public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5266 (ConcurrentHashMap<K,V> map,
5267 LongFunction<? super V> transformer,
5268 long basis,
5269 LongBinaryOperator reducer) {
5270 if (transformer == null || reducer == null)
5271 throw new NullPointerException();
5272 return new MapReduceValuesToLongTask<K,V>
5273 (map, null, -1, null, transformer, basis, reducer);
5274 }
5275
5276 /**
5277 * Returns a task that when invoked, returns the result of
5278 * accumulating the given transformation of all values using the
5279 * given reducer to combine values, and the given basis as an
5280 * identity value.
5281 *
5282 * @param map the map
5283 * @param transformer a function returning the transformation
5284 * for an element
5285 * @param basis the identity (initial default value) for the reduction
5286 * @param reducer a commutative associative combining function
5287 * @return the task
5288 */
5289 public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5290 (ConcurrentHashMap<K,V> map,
5291 IntFunction<? super V> transformer,
5292 int basis,
5293 IntBinaryOperator reducer) {
5294 if (transformer == null || reducer == null)
5295 throw new NullPointerException();
5296 return new MapReduceValuesToIntTask<K,V>
5297 (map, null, -1, null, transformer, basis, reducer);
5298 }
5299
5300 /**
5301      * Returns a task that when invoked, performs the given action
5302      * for each entry.
5303      *
5304      * @param map the map
5305      * @param action the action
          * @return the task
5306      */
5307 public static <K,V> ForkJoinTask<Void> forEachEntry
5308 (ConcurrentHashMap<K,V> map,
5309 Block<? super Map.Entry<K,V>> action) {
5310 if (action == null) throw new NullPointerException();
5311 return new ForEachEntryTask<K,V>(map, null, -1, action);
5312 }
5313
5314 /**
5315      * Returns a task that when invoked, performs the given action
5316 * for each non-null transformation of each entry.
5317 *
5318 * @param map the map
5319 * @param transformer a function returning the transformation
5320 * for an element, or null if there is no transformation (in
5321 * which case the action is not applied)
5322 * @param action the action
          * @return the task
5323      */
5324 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5325 (ConcurrentHashMap<K,V> map,
5326 Function<Map.Entry<K,V>, ? extends U> transformer,
5327 Block<? super U> action) {
5328 if (transformer == null || action == null)
5329 throw new NullPointerException();
5330 return new ForEachTransformedEntryTask<K,V,U>
5331 (map, null, -1, transformer, action);
5332 }
5333
5334 /**
5335 * Returns a task that when invoked, returns a non-null result
5336 * from applying the given search function on each entry, or
5337 * null if none. Upon success, further element processing is
5338 * suppressed and the results of any other parallel
5339 * invocations of the search function are ignored.
5340 *
5341 * @param map the map
5342 * @param searchFunction a function returning a non-null
5343 * result on success, else null
5344 * @return the task
5345 */
5346 public static <K,V,U> ForkJoinTask<U> searchEntries
5347 (ConcurrentHashMap<K,V> map,
5348 Function<Map.Entry<K,V>, ? extends U> searchFunction) {
5349 if (searchFunction == null) throw new NullPointerException();
5350 return new SearchEntriesTask<K,V,U>
5351 (map, null, -1, searchFunction,
5352 new AtomicReference<U>());
5353 }
5354
5355 /**
5356 * Returns a task that when invoked, returns the result of
5357 * accumulating all entries using the given reducer to combine
5358 * values, or null if none.
5359 *
5360 * @param map the map
5361 * @param reducer a commutative associative combining function
5362 * @return the task
5363 */
5364 public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5365 (ConcurrentHashMap<K,V> map,
5366 BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5367 if (reducer == null) throw new NullPointerException();
5368 return new ReduceEntriesTask<K,V>
5369 (map, null, -1, null, reducer);
5370 }
5371
5372 /**
5373 * Returns a task that when invoked, returns the result of
5374 * accumulating the given transformation of all entries using the
5375 * given reducer to combine values, or null if none.
5376 *
5377 * @param map the map
5378 * @param transformer a function returning the transformation
5379 * for an element, or null if there is no transformation (in
5380 * which case it is not combined).
5381 * @param reducer a commutative associative combining function
5382 * @return the task
5383 */
5384 public static <K,V,U> ForkJoinTask<U> reduceEntries
5385 (ConcurrentHashMap<K,V> map,
5386 Function<Map.Entry<K,V>, ? extends U> transformer,
5387 BiFunction<? super U, ? super U, ? extends U> reducer) {
5388 if (transformer == null || reducer == null)
5389 throw new NullPointerException();
5390 return new MapReduceEntriesTask<K,V,U>
5391 (map, null, -1, null, transformer, reducer);
5392 }
5393
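        /*
         * Illustrative sketch (editorial addition): summing a per-entry
         * score derived from both key and value ("map" and "pool" are
         * hypothetical).
         *
         *   ForkJoinTask<Integer> task = ConcurrentHashMap.ForkJoinTasks.reduceEntries
         *       (map,
         *        e -> e.getValue() - e.getKey().length(),
         *        (a, b) -> a + b);
         *   Integer total = pool.invoke(task);
         */
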
5394 /**
5395 * Returns a task that when invoked, returns the result of
5396 * accumulating the given transformation of all entries using the
5397 * given reducer to combine values, and the given basis as an
5398 * identity value.
5399 *
5400 * @param map the map
5401 * @param transformer a function returning the transformation
5402 * for an element
5403 * @param basis the identity (initial default value) for the reduction
5404 * @param reducer a commutative associative combining function
5405 * @return the task
5406 */
5407 public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5408 (ConcurrentHashMap<K,V> map,
5409 DoubleFunction<Map.Entry<K,V>> transformer,
5410 double basis,
5411 DoubleBinaryOperator reducer) {
5412 if (transformer == null || reducer == null)
5413 throw new NullPointerException();
5414 return new MapReduceEntriesToDoubleTask<K,V>
5415 (map, null, -1, null, transformer, basis, reducer);
5416 }
5417
5418 /**
5419 * Returns a task that when invoked, returns the result of
5420 * accumulating the given transformation of all entries using the
5421 * given reducer to combine values, and the given basis as an
5422 * identity value.
5423 *
5424 * @param map the map
5425 * @param transformer a function returning the transformation
5426 * for an element
5427 * @param basis the identity (initial default value) for the reduction
5428 * @param reducer a commutative associative combining function
5429 * @return the task
5430 */
5431 public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5432 (ConcurrentHashMap<K,V> map,
5433 LongFunction<Map.Entry<K,V>> transformer,
5434 long basis,
5435 LongBinaryOperator reducer) {
5436 if (transformer == null || reducer == null)
5437 throw new NullPointerException();
5438 return new MapReduceEntriesToLongTask<K,V>
5439 (map, null, -1, null, transformer, basis, reducer);
5440 }
5441
5442 /**
5443 * Returns a task that when invoked, returns the result of
5444 * accumulating the given transformation of all entries using the
5445 * given reducer to combine values, and the given basis as an
5446 * identity value.
5447 *
5448 * @param map the map
5449 * @param transformer a function returning the transformation
5450 * for an element
5451 * @param basis the identity (initial default value) for the reduction
5452 * @param reducer a commutative associative combining function
5453 * @return the task
5454 */
5455 public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5456 (ConcurrentHashMap<K,V> map,
5457 IntFunction<Map.Entry<K,V>> transformer,
5458 int basis,
5459 IntBinaryOperator reducer) {
5460 if (transformer == null || reducer == null)
5461 throw new NullPointerException();
5462 return new MapReduceEntriesToIntTask<K,V>
5463 (map, null, -1, null, transformer, basis, reducer);
5464 }
5465 }
5466
5467 // -------------------------------------------------------
5468
5469 /*
5470 * Task classes. Coded in a regular but ugly format/style to
5471 * simplify checks that each variant differs in the right way from
5472 * others. The null screenings exist because compilers cannot tell
5473      * that we've already null-checked task arguments, so we force
5474      * the simplest hoisted bypass to help avoid convoluted traps.
5475 */
5476
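    /*
     * Editorial sketch (an illustration, not additional code) of the
     * compute() structure shared by the task classes below: while
     * preSplit() returns a positive batch size, fork a sibling task over
     * the split-off portion; then consume the remaining elements via
     * advance(); then either propagate completion (forEach and search
     * variants) or merge per-task results up the completion tree (reduce
     * variants).
     *
     *   for (int b; (b = preSplit()) > 0;)
     *       new SomeTask(map, this, b, ...).fork();   // "SomeTask" is a placeholder
     *   while (advance() != null)
     *       ...handle current element...
     *   propagateCompletion();                        // or result merging
     */
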
5477 @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
5478 extends Traverser<K,V,Void> {
5479 final Block<? super K> action;
5480 ForEachKeyTask
5481 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5482 Block<? super K> action) {
5483 super(m, p, b);
5484 this.action = action;
5485 }
5486 @SuppressWarnings("unchecked") public final void compute() {
5487 final Block<? super K> action;
5488 if ((action = this.action) != null) {
5489 for (int b; (b = preSplit()) > 0;)
5490 new ForEachKeyTask<K,V>(map, this, b, action).fork();
5491 while (advance() != null)
5492 action.accept((K)nextKey);
5493 propagateCompletion();
5494 }
5495 }
5496 }
5497
5498 @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
5499 extends Traverser<K,V,Void> {
5500 final Block<? super V> action;
5501 ForEachValueTask
5502 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5503 Block<? super V> action) {
5504 super(m, p, b);
5505 this.action = action;
5506 }
5507 @SuppressWarnings("unchecked") public final void compute() {
5508 final Block<? super V> action;
5509 if ((action = this.action) != null) {
5510 for (int b; (b = preSplit()) > 0;)
5511 new ForEachValueTask<K,V>(map, this, b, action).fork();
5512 V v;
5513 while ((v = advance()) != null)
5514 action.accept(v);
5515 propagateCompletion();
5516 }
5517 }
5518 }
5519
5520 @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
5521 extends Traverser<K,V,Void> {
5522 final Block<? super Entry<K,V>> action;
5523 ForEachEntryTask
5524 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5525 Block<? super Entry<K,V>> action) {
5526 super(m, p, b);
5527 this.action = action;
5528 }
5529 @SuppressWarnings("unchecked") public final void compute() {
5530 final Block<? super Entry<K,V>> action;
5531 if ((action = this.action) != null) {
5532 for (int b; (b = preSplit()) > 0;)
5533 new ForEachEntryTask<K,V>(map, this, b, action).fork();
5534 V v;
5535 while ((v = advance()) != null)
5536 action.accept(entryFor((K)nextKey, v));
5537 propagateCompletion();
5538 }
5539 }
5540 }
5541
5542 @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
5543 extends Traverser<K,V,Void> {
5544 final BiBlock<? super K, ? super V> action;
5545 ForEachMappingTask
5546 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5547 BiBlock<? super K,? super V> action) {
5548 super(m, p, b);
5549 this.action = action;
5550 }
5551 @SuppressWarnings("unchecked") public final void compute() {
5552 final BiBlock<? super K, ? super V> action;
5553 if ((action = this.action) != null) {
5554 for (int b; (b = preSplit()) > 0;)
5555 new ForEachMappingTask<K,V>(map, this, b, action).fork();
5556 V v;
5557 while ((v = advance()) != null)
5558 action.accept((K)nextKey, v);
5559 propagateCompletion();
5560 }
5561 }
5562 }
5563
5564 @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
5565 extends Traverser<K,V,Void> {
5566 final Function<? super K, ? extends U> transformer;
5567 final Block<? super U> action;
5568 ForEachTransformedKeyTask
5569 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5570 Function<? super K, ? extends U> transformer, Block<? super U> action) {
5571 super(m, p, b);
5572 this.transformer = transformer; this.action = action;
5573 }
5574 @SuppressWarnings("unchecked") public final void compute() {
5575 final Function<? super K, ? extends U> transformer;
5576 final Block<? super U> action;
5577 if ((transformer = this.transformer) != null &&
5578 (action = this.action) != null) {
5579 for (int b; (b = preSplit()) > 0;)
5580 new ForEachTransformedKeyTask<K,V,U>
5581 (map, this, b, transformer, action).fork();
5582 U u;
5583 while (advance() != null) {
5584 if ((u = transformer.apply((K)nextKey)) != null)
5585 action.accept(u);
5586 }
5587 propagateCompletion();
5588 }
5589 }
5590 }
5591
5592 @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
5593 extends Traverser<K,V,Void> {
5594 final Function<? super V, ? extends U> transformer;
5595 final Block<? super U> action;
5596 ForEachTransformedValueTask
5597 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5598 Function<? super V, ? extends U> transformer, Block<? super U> action) {
5599 super(m, p, b);
5600 this.transformer = transformer; this.action = action;
5601 }
5602 @SuppressWarnings("unchecked") public final void compute() {
5603 final Function<? super V, ? extends U> transformer;
5604 final Block<? super U> action;
5605 if ((transformer = this.transformer) != null &&
5606 (action = this.action) != null) {
5607 for (int b; (b = preSplit()) > 0;)
5608 new ForEachTransformedValueTask<K,V,U>
5609 (map, this, b, transformer, action).fork();
5610 V v; U u;
5611 while ((v = advance()) != null) {
5612 if ((u = transformer.apply(v)) != null)
5613 action.accept(u);
5614 }
5615 propagateCompletion();
5616 }
5617 }
5618 }
5619
5620 @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
5621 extends Traverser<K,V,Void> {
5622 final Function<Map.Entry<K,V>, ? extends U> transformer;
5623 final Block<? super U> action;
5624 ForEachTransformedEntryTask
5625 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5626 Function<Map.Entry<K,V>, ? extends U> transformer, Block<? super U> action) {
5627 super(m, p, b);
5628 this.transformer = transformer; this.action = action;
5629 }
5630 @SuppressWarnings("unchecked") public final void compute() {
5631 final Function<Map.Entry<K,V>, ? extends U> transformer;
5632 final Block<? super U> action;
5633 if ((transformer = this.transformer) != null &&
5634 (action = this.action) != null) {
5635 for (int b; (b = preSplit()) > 0;)
5636 new ForEachTransformedEntryTask<K,V,U>
5637 (map, this, b, transformer, action).fork();
5638 V v; U u;
5639 while ((v = advance()) != null) {
5640 if ((u = transformer.apply(entryFor((K)nextKey,
5641 v))) != null)
5642 action.accept(u);
5643 }
5644 propagateCompletion();
5645 }
5646 }
5647 }
5648
5649 @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
5650 extends Traverser<K,V,Void> {
5651 final BiFunction<? super K, ? super V, ? extends U> transformer;
5652 final Block<? super U> action;
5653 ForEachTransformedMappingTask
5654 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5655 BiFunction<? super K, ? super V, ? extends U> transformer,
5656 Block<? super U> action) {
5657 super(m, p, b);
5658 this.transformer = transformer; this.action = action;
5659 }
5660 @SuppressWarnings("unchecked") public final void compute() {
5661 final BiFunction<? super K, ? super V, ? extends U> transformer;
5662 final Block<? super U> action;
5663 if ((transformer = this.transformer) != null &&
5664 (action = this.action) != null) {
5665 for (int b; (b = preSplit()) > 0;)
5666 new ForEachTransformedMappingTask<K,V,U>
5667 (map, this, b, transformer, action).fork();
5668 V v; U u;
5669 while ((v = advance()) != null) {
5670 if ((u = transformer.apply((K)nextKey, v)) != null)
5671 action.accept(u);
5672 }
5673 propagateCompletion();
5674 }
5675 }
5676 }
5677
5678 @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
5679 extends Traverser<K,V,U> {
5680 final Function<? super K, ? extends U> searchFunction;
5681 final AtomicReference<U> result;
5682 SearchKeysTask
5683 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5684 Function<? super K, ? extends U> searchFunction,
5685 AtomicReference<U> result) {
5686 super(m, p, b);
5687 this.searchFunction = searchFunction; this.result = result;
5688 }
5689 public final U getRawResult() { return result.get(); }
5690 @SuppressWarnings("unchecked") public final void compute() {
5691 final Function<? super K, ? extends U> searchFunction;
5692 final AtomicReference<U> result;
5693 if ((searchFunction = this.searchFunction) != null &&
5694 (result = this.result) != null) {
5695 for (int b;;) {
5696 if (result.get() != null)
5697 return;
5698 if ((b = preSplit()) <= 0)
5699 break;
5700 new SearchKeysTask<K,V,U>
5701 (map, this, b, searchFunction, result).fork();
5702 }
5703 while (result.get() == null) {
5704 U u;
5705 if (advance() == null) {
5706 propagateCompletion();
5707 break;
5708 }
5709 if ((u = searchFunction.apply((K)nextKey)) != null) {
5710 if (result.compareAndSet(null, u))
5711 quietlyCompleteRoot();
5712 break;
5713 }
5714 }
5715 }
5716 }
5717 }
5718
5719 @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
5720 extends Traverser<K,V,U> {
5721 final Function<? super V, ? extends U> searchFunction;
5722 final AtomicReference<U> result;
5723 SearchValuesTask
5724 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5725 Function<? super V, ? extends U> searchFunction,
5726 AtomicReference<U> result) {
5727 super(m, p, b);
5728 this.searchFunction = searchFunction; this.result = result;
5729 }
5730 public final U getRawResult() { return result.get(); }
5731 @SuppressWarnings("unchecked") public final void compute() {
5732 final Function<? super V, ? extends U> searchFunction;
5733 final AtomicReference<U> result;
5734 if ((searchFunction = this.searchFunction) != null &&
5735 (result = this.result) != null) {
5736 for (int b;;) {
5737 if (result.get() != null)
5738 return;
5739 if ((b = preSplit()) <= 0)
5740 break;
5741 new SearchValuesTask<K,V,U>
5742 (map, this, b, searchFunction, result).fork();
5743 }
5744 while (result.get() == null) {
5745 V v; U u;
5746 if ((v = advance()) == null) {
5747 propagateCompletion();
5748 break;
5749 }
5750 if ((u = searchFunction.apply(v)) != null) {
5751 if (result.compareAndSet(null, u))
5752 quietlyCompleteRoot();
5753 break;
5754 }
5755 }
5756 }
5757 }
5758 }
5759
5760 @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
5761 extends Traverser<K,V,U> {
5762 final Function<Entry<K,V>, ? extends U> searchFunction;
5763 final AtomicReference<U> result;
5764 SearchEntriesTask
5765 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5766 Function<Entry<K,V>, ? extends U> searchFunction,
5767 AtomicReference<U> result) {
5768 super(m, p, b);
5769 this.searchFunction = searchFunction; this.result = result;
5770 }
5771 public final U getRawResult() { return result.get(); }
5772 @SuppressWarnings("unchecked") public final void compute() {
5773 final Function<Entry<K,V>, ? extends U> searchFunction;
5774 final AtomicReference<U> result;
5775 if ((searchFunction = this.searchFunction) != null &&
5776 (result = this.result) != null) {
5777 for (int b;;) {
5778 if (result.get() != null)
5779 return;
5780 if ((b = preSplit()) <= 0)
5781 break;
5782 new SearchEntriesTask<K,V,U>
5783 (map, this, b, searchFunction, result).fork();
5784 }
5785 while (result.get() == null) {
5786 V v; U u;
5787 if ((v = advance()) == null) {
5788 propagateCompletion();
5789 break;
5790 }
5791 if ((u = searchFunction.apply(entryFor((K)nextKey,
5792 v))) != null) {
5793 if (result.compareAndSet(null, u))
5794 quietlyCompleteRoot();
5795 return;
5796 }
5797 }
5798 }
5799 }
5800 }
5801
5802 @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
5803 extends Traverser<K,V,U> {
5804 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
5805 final AtomicReference<U> result;
5806 SearchMappingsTask
5807 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5808 BiFunction<? super K, ? super V, ? extends U> searchFunction,
5809 AtomicReference<U> result) {
5810 super(m, p, b);
5811 this.searchFunction = searchFunction; this.result = result;
5812 }
5813 public final U getRawResult() { return result.get(); }
5814 @SuppressWarnings("unchecked") public final void compute() {
5815 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
5816 final AtomicReference<U> result;
5817 if ((searchFunction = this.searchFunction) != null &&
5818 (result = this.result) != null) {
5819 for (int b;;) {
5820 if (result.get() != null)
5821 return;
5822 if ((b = preSplit()) <= 0)
5823 break;
5824 new SearchMappingsTask<K,V,U>
5825 (map, this, b, searchFunction, result).fork();
5826 }
5827 while (result.get() == null) {
5828 V v; U u;
5829 if ((v = advance()) == null) {
5830 propagateCompletion();
5831 break;
5832 }
5833 if ((u = searchFunction.apply((K)nextKey, v)) != null) {
5834 if (result.compareAndSet(null, u))
5835 quietlyCompleteRoot();
5836 break;
5837 }
5838 }
5839 }
5840 }
5841 }
5842
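    /*
     * Editorial note: the four search task classes above short-circuit
     * through their shared AtomicReference. The first task to obtain a
     * non-null answer publishes it with compareAndSet and calls
     * quietlyCompleteRoot(); sibling tasks observe result.get() != null
     * and stop traversing, so at most one successful result is reported.
     */
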
5843 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
5844 extends Traverser<K,V,K> {
5845 final BiFunction<? super K, ? super K, ? extends K> reducer;
5846 K result;
5847 ReduceKeysTask<K,V> rights, nextRight;
5848 ReduceKeysTask
5849 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5850 ReduceKeysTask<K,V> nextRight,
5851 BiFunction<? super K, ? super K, ? extends K> reducer) {
5852 super(m, p, b); this.nextRight = nextRight;
5853 this.reducer = reducer;
5854 }
5855 public final K getRawResult() { return result; }
5856 @SuppressWarnings("unchecked") public final void compute() {
5857 final BiFunction<? super K, ? super K, ? extends K> reducer;
5858 if ((reducer = this.reducer) != null) {
5859 for (int b; (b = preSplit()) > 0;)
5860 (rights = new ReduceKeysTask<K,V>
5861 (map, this, b, rights, reducer)).fork();
5862 K r = null;
5863 while (advance() != null) {
5864 K u = (K)nextKey;
5865 r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
5866 }
5867 result = r;
5868 CountedCompleter<?> c;
5869 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5870 ReduceKeysTask<K,V>
5871 t = (ReduceKeysTask<K,V>)c,
5872 s = t.rights;
5873 while (s != null) {
5874 K tr, sr;
5875 if ((sr = s.result) != null)
5876 t.result = (((tr = t.result) == null) ? sr :
5877 reducer.apply(tr, sr));
5878 s = t.rights = s.nextRight;
5879 }
5880 }
5881 }
5882 }
5883 }
5884
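    /*
     * Editorial note: ReduceKeysTask above illustrates the merge pattern
     * used by all of the reduce tasks that follow. Each forked sibling is
     * linked onto the "rights" chain; once a task's own traversal result
     * is set, firstComplete()/nextComplete() walk up the completion tree,
     * folding each completed right-hand result into the parent's result
     * with the reducer (null partial results are simply skipped).
     */
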
5885 @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
5886 extends Traverser<K,V,V> {
5887 final BiFunction<? super V, ? super V, ? extends V> reducer;
5888 V result;
5889 ReduceValuesTask<K,V> rights, nextRight;
5890 ReduceValuesTask
5891 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5892 ReduceValuesTask<K,V> nextRight,
5893 BiFunction<? super V, ? super V, ? extends V> reducer) {
5894 super(m, p, b); this.nextRight = nextRight;
5895 this.reducer = reducer;
5896 }
5897 public final V getRawResult() { return result; }
5898 @SuppressWarnings("unchecked") public final void compute() {
5899 final BiFunction<? super V, ? super V, ? extends V> reducer;
5900 if ((reducer = this.reducer) != null) {
5901 for (int b; (b = preSplit()) > 0;)
5902 (rights = new ReduceValuesTask<K,V>
5903 (map, this, b, rights, reducer)).fork();
5904 V r = null, v;
5905 while ((v = advance()) != null)
5906 r = (r == null) ? v : reducer.apply(r, v);
5907 result = r;
5908 CountedCompleter<?> c;
5909 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5910 ReduceValuesTask<K,V>
5911 t = (ReduceValuesTask<K,V>)c,
5912 s = t.rights;
5913 while (s != null) {
5914 V tr, sr;
5915 if ((sr = s.result) != null)
5916 t.result = (((tr = t.result) == null) ? sr :
5917 reducer.apply(tr, sr));
5918 s = t.rights = s.nextRight;
5919 }
5920 }
5921 }
5922 }
5923 }
5924
5925 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
5926 extends Traverser<K,V,Map.Entry<K,V>> {
5927 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5928 Map.Entry<K,V> result;
5929 ReduceEntriesTask<K,V> rights, nextRight;
5930 ReduceEntriesTask
5931 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5932 ReduceEntriesTask<K,V> nextRight,
5933              BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5934 super(m, p, b); this.nextRight = nextRight;
5935 this.reducer = reducer;
5936 }
5937 public final Map.Entry<K,V> getRawResult() { return result; }
5938 @SuppressWarnings("unchecked") public final void compute() {
5939 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5940 if ((reducer = this.reducer) != null) {
5941 for (int b; (b = preSplit()) > 0;)
5942 (rights = new ReduceEntriesTask<K,V>
5943 (map, this, b, rights, reducer)).fork();
5944 Map.Entry<K,V> r = null;
5945 V v;
5946 while ((v = advance()) != null) {
5947 Map.Entry<K,V> u = entryFor((K)nextKey, v);
5948 r = (r == null) ? u : reducer.apply(r, u);
5949 }
5950 result = r;
5951 CountedCompleter<?> c;
5952 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5953 ReduceEntriesTask<K,V>
5954 t = (ReduceEntriesTask<K,V>)c,
5955 s = t.rights;
5956 while (s != null) {
5957 Map.Entry<K,V> tr, sr;
5958 if ((sr = s.result) != null)
5959 t.result = (((tr = t.result) == null) ? sr :
5960 reducer.apply(tr, sr));
5961 s = t.rights = s.nextRight;
5962 }
5963 }
5964 }
5965 }
5966 }
5967
5968 @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
5969 extends Traverser<K,V,U> {
5970 final Function<? super K, ? extends U> transformer;
5971 final BiFunction<? super U, ? super U, ? extends U> reducer;
5972 U result;
5973 MapReduceKeysTask<K,V,U> rights, nextRight;
5974 MapReduceKeysTask
5975 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5976 MapReduceKeysTask<K,V,U> nextRight,
5977 Function<? super K, ? extends U> transformer,
5978 BiFunction<? super U, ? super U, ? extends U> reducer) {
5979 super(m, p, b); this.nextRight = nextRight;
5980 this.transformer = transformer;
5981 this.reducer = reducer;
5982 }
5983 public final U getRawResult() { return result; }
5984 @SuppressWarnings("unchecked") public final void compute() {
5985 final Function<? super K, ? extends U> transformer;
5986 final BiFunction<? super U, ? super U, ? extends U> reducer;
5987 if ((transformer = this.transformer) != null &&
5988 (reducer = this.reducer) != null) {
5989 for (int b; (b = preSplit()) > 0;)
5990 (rights = new MapReduceKeysTask<K,V,U>
5991 (map, this, b, rights, transformer, reducer)).fork();
5992 U r = null, u;
5993 while (advance() != null) {
5994 if ((u = transformer.apply((K)nextKey)) != null)
5995 r = (r == null) ? u : reducer.apply(r, u);
5996 }
5997 result = r;
5998 CountedCompleter<?> c;
5999 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6000 MapReduceKeysTask<K,V,U>
6001 t = (MapReduceKeysTask<K,V,U>)c,
6002 s = t.rights;
6003 while (s != null) {
6004 U tr, sr;
6005 if ((sr = s.result) != null)
6006 t.result = (((tr = t.result) == null) ? sr :
6007 reducer.apply(tr, sr));
6008 s = t.rights = s.nextRight;
6009 }
6010 }
6011 }
6012 }
6013 }
6014
6015 @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
6016 extends Traverser<K,V,U> {
6017 final Function<? super V, ? extends U> transformer;
6018 final BiFunction<? super U, ? super U, ? extends U> reducer;
6019 U result;
6020 MapReduceValuesTask<K,V,U> rights, nextRight;
6021 MapReduceValuesTask
6022 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6023 MapReduceValuesTask<K,V,U> nextRight,
6024 Function<? super V, ? extends U> transformer,
6025 BiFunction<? super U, ? super U, ? extends U> reducer) {
6026 super(m, p, b); this.nextRight = nextRight;
6027 this.transformer = transformer;
6028 this.reducer = reducer;
6029 }
6030 public final U getRawResult() { return result; }
6031 @SuppressWarnings("unchecked") public final void compute() {
6032 final Function<? super V, ? extends U> transformer;
6033 final BiFunction<? super U, ? super U, ? extends U> reducer;
6034 if ((transformer = this.transformer) != null &&
6035 (reducer = this.reducer) != null) {
6036 for (int b; (b = preSplit()) > 0;)
6037 (rights = new MapReduceValuesTask<K,V,U>
6038 (map, this, b, rights, transformer, reducer)).fork();
6039 U r = null, u;
6040 V v;
6041 while ((v = advance()) != null) {
6042 if ((u = transformer.apply(v)) != null)
6043 r = (r == null) ? u : reducer.apply(r, u);
6044 }
6045 result = r;
6046 CountedCompleter<?> c;
6047 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6048 MapReduceValuesTask<K,V,U>
6049 t = (MapReduceValuesTask<K,V,U>)c,
6050 s = t.rights;
6051 while (s != null) {
6052 U tr, sr;
6053 if ((sr = s.result) != null)
6054 t.result = (((tr = t.result) == null) ? sr :
6055 reducer.apply(tr, sr));
6056 s = t.rights = s.nextRight;
6057 }
6058 }
6059 }
6060 }
6061 }
6062
6063 @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
6064 extends Traverser<K,V,U> {
6065 final Function<Map.Entry<K,V>, ? extends U> transformer;
6066 final BiFunction<? super U, ? super U, ? extends U> reducer;
6067 U result;
6068 MapReduceEntriesTask<K,V,U> rights, nextRight;
6069 MapReduceEntriesTask
6070 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6071 MapReduceEntriesTask<K,V,U> nextRight,
6072 Function<Map.Entry<K,V>, ? extends U> transformer,
6073 BiFunction<? super U, ? super U, ? extends U> reducer) {
6074 super(m, p, b); this.nextRight = nextRight;
6075 this.transformer = transformer;
6076 this.reducer = reducer;
6077 }
6078 public final U getRawResult() { return result; }
6079 @SuppressWarnings("unchecked") public final void compute() {
6080 final Function<Map.Entry<K,V>, ? extends U> transformer;
6081 final BiFunction<? super U, ? super U, ? extends U> reducer;
6082 if ((transformer = this.transformer) != null &&
6083 (reducer = this.reducer) != null) {
6084 for (int b; (b = preSplit()) > 0;)
6085 (rights = new MapReduceEntriesTask<K,V,U>
6086 (map, this, b, rights, transformer, reducer)).fork();
6087 U r = null, u;
6088 V v;
6089 while ((v = advance()) != null) {
6090 if ((u = transformer.apply(entryFor((K)nextKey,
6091 v))) != null)
6092 r = (r == null) ? u : reducer.apply(r, u);
6093 }
6094 result = r;
6095 CountedCompleter<?> c;
6096 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6097 MapReduceEntriesTask<K,V,U>
6098 t = (MapReduceEntriesTask<K,V,U>)c,
6099 s = t.rights;
6100 while (s != null) {
6101 U tr, sr;
6102 if ((sr = s.result) != null)
6103 t.result = (((tr = t.result) == null) ? sr :
6104 reducer.apply(tr, sr));
6105 s = t.rights = s.nextRight;
6106 }
6107 }
6108 }
6109 }
6110 }
6111
6112 @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
6113 extends Traverser<K,V,U> {
6114 final BiFunction<? super K, ? super V, ? extends U> transformer;
6115 final BiFunction<? super U, ? super U, ? extends U> reducer;
6116 U result;
6117 MapReduceMappingsTask<K,V,U> rights, nextRight;
6118 MapReduceMappingsTask
6119 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6120 MapReduceMappingsTask<K,V,U> nextRight,
6121 BiFunction<? super K, ? super V, ? extends U> transformer,
6122 BiFunction<? super U, ? super U, ? extends U> reducer) {
6123 super(m, p, b); this.nextRight = nextRight;
6124 this.transformer = transformer;
6125 this.reducer = reducer;
6126 }
6127 public final U getRawResult() { return result; }
6128 @SuppressWarnings("unchecked") public final void compute() {
6129 final BiFunction<? super K, ? super V, ? extends U> transformer;
6130 final BiFunction<? super U, ? super U, ? extends U> reducer;
6131 if ((transformer = this.transformer) != null &&
6132 (reducer = this.reducer) != null) {
6133 for (int b; (b = preSplit()) > 0;)
6134 (rights = new MapReduceMappingsTask<K,V,U>
6135 (map, this, b, rights, transformer, reducer)).fork();
6136 U r = null, u;
6137 V v;
6138 while ((v = advance()) != null) {
6139 if ((u = transformer.apply((K)nextKey, v)) != null)
6140 r = (r == null) ? u : reducer.apply(r, u);
6141 }
6142 result = r;
6143 CountedCompleter<?> c;
6144 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6145 MapReduceMappingsTask<K,V,U>
6146 t = (MapReduceMappingsTask<K,V,U>)c,
6147 s = t.rights;
6148 while (s != null) {
6149 U tr, sr;
6150 if ((sr = s.result) != null)
6151 t.result = (((tr = t.result) == null) ? sr :
6152 reducer.apply(tr, sr));
6153 s = t.rights = s.nextRight;
6154 }
6155 }
6156 }
6157 }
6158 }
6159
6160 @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
6161 extends Traverser<K,V,Double> {
6162 final DoubleFunction<? super K> transformer;
6163 final DoubleBinaryOperator reducer;
6164 final double basis;
6165 double result;
6166 MapReduceKeysToDoubleTask<K,V> rights, nextRight;
6167 MapReduceKeysToDoubleTask
6168 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6169 MapReduceKeysToDoubleTask<K,V> nextRight,
6170 DoubleFunction<? super K> transformer,
6171 double basis,
6172 DoubleBinaryOperator reducer) {
6173 super(m, p, b); this.nextRight = nextRight;
6174 this.transformer = transformer;
6175 this.basis = basis; this.reducer = reducer;
6176 }
6177 public final Double getRawResult() { return result; }
6178 @SuppressWarnings("unchecked") public final void compute() {
6179 final DoubleFunction<? super K> transformer;
6180 final DoubleBinaryOperator reducer;
6181 if ((transformer = this.transformer) != null &&
6182 (reducer = this.reducer) != null) {
6183 double r = this.basis;
6184 for (int b; (b = preSplit()) > 0;)
6185 (rights = new MapReduceKeysToDoubleTask<K,V>
6186 (map, this, b, rights, transformer, r, reducer)).fork();
6187 while (advance() != null)
6188 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey));
6189 result = r;
6190 CountedCompleter<?> c;
6191 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6192 MapReduceKeysToDoubleTask<K,V>
6193 t = (MapReduceKeysToDoubleTask<K,V>)c,
6194 s = t.rights;
6195 while (s != null) {
6196 t.result = reducer.applyAsDouble(t.result, s.result);
6197 s = t.rights = s.nextRight;
6198 }
6199 }
6200 }
6201 }
6202 }
6203
6204 @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
6205 extends Traverser<K,V,Double> {
6206 final DoubleFunction<? super V> transformer;
6207 final DoubleBinaryOperator reducer;
6208 final double basis;
6209 double result;
6210 MapReduceValuesToDoubleTask<K,V> rights, nextRight;
6211 MapReduceValuesToDoubleTask
6212 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6213 MapReduceValuesToDoubleTask<K,V> nextRight,
6214 DoubleFunction<? super V> transformer,
6215 double basis,
6216 DoubleBinaryOperator reducer) {
6217 super(m, p, b); this.nextRight = nextRight;
6218 this.transformer = transformer;
6219 this.basis = basis; this.reducer = reducer;
6220 }
6221 public final Double getRawResult() { return result; }
6222 @SuppressWarnings("unchecked") public final void compute() {
6223 final DoubleFunction<? super V> transformer;
6224 final DoubleBinaryOperator reducer;
6225 if ((transformer = this.transformer) != null &&
6226 (reducer = this.reducer) != null) {
6227 double r = this.basis;
6228 for (int b; (b = preSplit()) > 0;)
6229 (rights = new MapReduceValuesToDoubleTask<K,V>
6230 (map, this, b, rights, transformer, r, reducer)).fork();
6231 V v;
6232 while ((v = advance()) != null)
6233 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
6234 result = r;
6235 CountedCompleter<?> c;
6236 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6237 MapReduceValuesToDoubleTask<K,V>
6238 t = (MapReduceValuesToDoubleTask<K,V>)c,
6239 s = t.rights;
6240 while (s != null) {
6241 t.result = reducer.applyAsDouble(t.result, s.result);
6242 s = t.rights = s.nextRight;
6243 }
6244 }
6245 }
6246 }
6247 }
6248
6249 @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
6250 extends Traverser<K,V,Double> {
6251 final DoubleFunction<Map.Entry<K,V>> transformer;
6252 final DoubleBinaryOperator reducer;
6253 final double basis;
6254 double result;
6255 MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
6256 MapReduceEntriesToDoubleTask
6257 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6258 MapReduceEntriesToDoubleTask<K,V> nextRight,
6259 DoubleFunction<Map.Entry<K,V>> transformer,
6260 double basis,
6261 DoubleBinaryOperator reducer) {
6262 super(m, p, b); this.nextRight = nextRight;
6263 this.transformer = transformer;
6264 this.basis = basis; this.reducer = reducer;
6265 }
6266 public final Double getRawResult() { return result; }
6267 @SuppressWarnings("unchecked") public final void compute() {
6268 final DoubleFunction<Map.Entry<K,V>> transformer;
6269 final DoubleBinaryOperator reducer;
6270 if ((transformer = this.transformer) != null &&
6271 (reducer = this.reducer) != null) {
6272 double r = this.basis;
6273 for (int b; (b = preSplit()) > 0;)
6274 (rights = new MapReduceEntriesToDoubleTask<K,V>
6275 (map, this, b, rights, transformer, r, reducer)).fork();
6276 V v;
6277 while ((v = advance()) != null)
6278 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)nextKey,
6279 v)));
6280 result = r;
6281 CountedCompleter<?> c;
6282 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6283 MapReduceEntriesToDoubleTask<K,V>
6284 t = (MapReduceEntriesToDoubleTask<K,V>)c,
6285 s = t.rights;
6286 while (s != null) {
6287 t.result = reducer.applyAsDouble(t.result, s.result);
6288 s = t.rights = s.nextRight;
6289 }
6290 }
6291 }
6292 }
6293 }
6294
6295 @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6296 extends Traverser<K,V,Double> {
6297 final DoubleBiFunction<? super K, ? super V> transformer;
6298 final DoubleBinaryOperator reducer;
6299 final double basis;
6300 double result;
6301 MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6302 MapReduceMappingsToDoubleTask
6303 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6304 MapReduceMappingsToDoubleTask<K,V> nextRight,
6305 DoubleBiFunction<? super K, ? super V> transformer,
6306 double basis,
6307 DoubleBinaryOperator reducer) {
6308 super(m, p, b); this.nextRight = nextRight;
6309 this.transformer = transformer;
6310 this.basis = basis; this.reducer = reducer;
6311 }
6312 public final Double getRawResult() { return result; }
6313 @SuppressWarnings("unchecked") public final void compute() {
6314 final DoubleBiFunction<? super K, ? super V> transformer;
6315 final DoubleBinaryOperator reducer;
6316 if ((transformer = this.transformer) != null &&
6317 (reducer = this.reducer) != null) {
6318 double r = this.basis;
6319 for (int b; (b = preSplit()) > 0;)
6320 (rights = new MapReduceMappingsToDoubleTask<K,V>
6321 (map, this, b, rights, transformer, r, reducer)).fork();
6322 V v;
6323 while ((v = advance()) != null)
6324 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey, v));
6325 result = r;
6326 CountedCompleter<?> c;
6327 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6328 MapReduceMappingsToDoubleTask<K,V>
6329 t = (MapReduceMappingsToDoubleTask<K,V>)c,
6330 s = t.rights;
6331 while (s != null) {
6332 t.result = reducer.applyAsDouble(t.result, s.result);
6333 s = t.rights = s.nextRight;
6334 }
6335 }
6336 }
6337 }
6338 }
6339
6340 @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6341 extends Traverser<K,V,Long> {
6342 final LongFunction<? super K> transformer;
6343 final LongBinaryOperator reducer;
6344 final long basis;
6345 long result;
6346 MapReduceKeysToLongTask<K,V> rights, nextRight;
6347 MapReduceKeysToLongTask
6348 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6349 MapReduceKeysToLongTask<K,V> nextRight,
6350 LongFunction<? super K> transformer,
6351 long basis,
6352 LongBinaryOperator reducer) {
6353 super(m, p, b); this.nextRight = nextRight;
6354 this.transformer = transformer;
6355 this.basis = basis; this.reducer = reducer;
6356 }
6357 public final Long getRawResult() { return result; }
6358 @SuppressWarnings("unchecked") public final void compute() {
6359 final LongFunction<? super K> transformer;
6360 final LongBinaryOperator reducer;
6361 if ((transformer = this.transformer) != null &&
6362 (reducer = this.reducer) != null) {
6363 long r = this.basis;
6364 for (int b; (b = preSplit()) > 0;)
6365 (rights = new MapReduceKeysToLongTask<K,V>
6366 (map, this, b, rights, transformer, r, reducer)).fork();
6367 while (advance() != null)
6368 r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey));
6369 result = r;
6370 CountedCompleter<?> c;
6371 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6372 MapReduceKeysToLongTask<K,V>
6373 t = (MapReduceKeysToLongTask<K,V>)c,
6374 s = t.rights;
6375 while (s != null) {
6376 t.result = reducer.applyAsLong(t.result, s.result);
6377 s = t.rights = s.nextRight;
6378 }
6379 }
6380 }
6381 }
6382 }
6383
6384 @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6385 extends Traverser<K,V,Long> {
6386 final LongFunction<? super V> transformer;
6387 final LongBinaryOperator reducer;
6388 final long basis;
6389 long result;
6390 MapReduceValuesToLongTask<K,V> rights, nextRight;
6391 MapReduceValuesToLongTask
6392 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6393 MapReduceValuesToLongTask<K,V> nextRight,
6394 LongFunction<? super V> transformer,
6395 long basis,
6396 LongBinaryOperator reducer) {
6397 super(m, p, b); this.nextRight = nextRight;
6398 this.transformer = transformer;
6399 this.basis = basis; this.reducer = reducer;
6400 }
6401 public final Long getRawResult() { return result; }
6402 @SuppressWarnings("unchecked") public final void compute() {
6403 final LongFunction<? super V> transformer;
6404 final LongBinaryOperator reducer;
6405 if ((transformer = this.transformer) != null &&
6406 (reducer = this.reducer) != null) {
6407 long r = this.basis;
6408 for (int b; (b = preSplit()) > 0;)
6409 (rights = new MapReduceValuesToLongTask<K,V>
6410 (map, this, b, rights, transformer, r, reducer)).fork();
6411 V v;
6412 while ((v = advance()) != null)
6413 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
6414 result = r;
6415 CountedCompleter<?> c;
6416 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6417 MapReduceValuesToLongTask<K,V>
6418 t = (MapReduceValuesToLongTask<K,V>)c,
6419 s = t.rights;
6420 while (s != null) {
6421 t.result = reducer.applyAsLong(t.result, s.result);
6422 s = t.rights = s.nextRight;
6423 }
6424 }
6425 }
6426 }
6427 }
6428
6429 @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6430 extends Traverser<K,V,Long> {
6431 final LongFunction<Map.Entry<K,V>> transformer;
6432 final LongBinaryOperator reducer;
6433 final long basis;
6434 long result;
6435 MapReduceEntriesToLongTask<K,V> rights, nextRight;
6436 MapReduceEntriesToLongTask
6437 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6438 MapReduceEntriesToLongTask<K,V> nextRight,
6439 LongFunction<Map.Entry<K,V>> transformer,
6440 long basis,
6441 LongBinaryOperator reducer) {
6442 super(m, p, b); this.nextRight = nextRight;
6443 this.transformer = transformer;
6444 this.basis = basis; this.reducer = reducer;
6445 }
6446 public final Long getRawResult() { return result; }
6447 @SuppressWarnings("unchecked") public final void compute() {
6448 final LongFunction<Map.Entry<K,V>> transformer;
6449 final LongBinaryOperator reducer;
6450 if ((transformer = this.transformer) != null &&
6451 (reducer = this.reducer) != null) {
6452 long r = this.basis;
6453 for (int b; (b = preSplit()) > 0;)
6454 (rights = new MapReduceEntriesToLongTask<K,V>
6455 (map, this, b, rights, transformer, r, reducer)).fork();
6456 V v;
6457 while ((v = advance()) != null)
6458 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)nextKey, v)));
6459 result = r;
6460 CountedCompleter<?> c;
6461 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6462 MapReduceEntriesToLongTask<K,V>
6463 t = (MapReduceEntriesToLongTask<K,V>)c,
6464 s = t.rights;
6465 while (s != null) {
6466 t.result = reducer.applyAsLong(t.result, s.result);
6467 s = t.rights = s.nextRight;
6468 }
6469 }
6470 }
6471 }
6472 }
6473
6474 @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6475 extends Traverser<K,V,Long> {
6476 final LongBiFunction<? super K, ? super V> transformer;
6477 final LongBinaryOperator reducer;
6478 final long basis;
6479 long result;
6480 MapReduceMappingsToLongTask<K,V> rights, nextRight;
6481 MapReduceMappingsToLongTask
6482 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6483 MapReduceMappingsToLongTask<K,V> nextRight,
6484 LongBiFunction<? super K, ? super V> transformer,
6485 long basis,
6486 LongBinaryOperator reducer) {
6487 super(m, p, b); this.nextRight = nextRight;
6488 this.transformer = transformer;
6489 this.basis = basis; this.reducer = reducer;
6490 }
6491 public final Long getRawResult() { return result; }
6492 @SuppressWarnings("unchecked") public final void compute() {
6493 final LongBiFunction<? super K, ? super V> transformer;
6494 final LongBinaryOperator reducer;
6495 if ((transformer = this.transformer) != null &&
6496 (reducer = this.reducer) != null) {
6497 long r = this.basis;
6498 for (int b; (b = preSplit()) > 0;)
6499 (rights = new MapReduceMappingsToLongTask<K,V>
6500 (map, this, b, rights, transformer, r, reducer)).fork();
6501 V v;
6502 while ((v = advance()) != null)
6503 r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey, v));
6504 result = r;
6505 CountedCompleter<?> c;
6506 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6507 MapReduceMappingsToLongTask<K,V>
6508 t = (MapReduceMappingsToLongTask<K,V>)c,
6509 s = t.rights;
6510 while (s != null) {
6511 t.result = reducer.applyAsLong(t.result, s.result);
6512 s = t.rights = s.nextRight;
6513 }
6514 }
6515 }
6516 }
6517 }
6518
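    /*
     * The MapReduce*ToIntTask classes below are the int-valued counterparts
     * of the long-valued tasks above: identical split/fork/traverse/merge
     * structure, but with an int basis, an IntBinaryOperator reducer, and
     * an int result.
     */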
6519 @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6520 extends Traverser<K,V,Integer> {
6521 final IntFunction<? super K> transformer;
6522 final IntBinaryOperator reducer;
6523 final int basis;
6524 int result;
6525 MapReduceKeysToIntTask<K,V> rights, nextRight;
6526 MapReduceKeysToIntTask
6527 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6528 MapReduceKeysToIntTask<K,V> nextRight,
6529 IntFunction<? super K> transformer,
6530 int basis,
6531 IntBinaryOperator reducer) {
6532 super(m, p, b); this.nextRight = nextRight;
6533 this.transformer = transformer;
6534 this.basis = basis; this.reducer = reducer;
6535 }
6536 public final Integer getRawResult() { return result; }
6537 @SuppressWarnings("unchecked") public final void compute() {
6538 final IntFunction<? super K> transformer;
6539 final IntBinaryOperator reducer;
6540 if ((transformer = this.transformer) != null &&
6541 (reducer = this.reducer) != null) {
6542 int r = this.basis;
6543 for (int b; (b = preSplit()) > 0;)
6544 (rights = new MapReduceKeysToIntTask<K,V>
6545 (map, this, b, rights, transformer, r, reducer)).fork();
6546 while (advance() != null)
6547 r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey));
6548 result = r;
6549 CountedCompleter<?> c;
6550 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6551 MapReduceKeysToIntTask<K,V>
6552 t = (MapReduceKeysToIntTask<K,V>)c,
6553 s = t.rights;
6554 while (s != null) {
6555 t.result = reducer.applyAsInt(t.result, s.result);
6556 s = t.rights = s.nextRight;
6557 }
6558 }
6559 }
6560 }
6561 }
6562
6563 @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6564 extends Traverser<K,V,Integer> {
6565 final IntFunction<? super V> transformer;
6566 final IntBinaryOperator reducer;
6567 final int basis;
6568 int result;
6569 MapReduceValuesToIntTask<K,V> rights, nextRight;
6570 MapReduceValuesToIntTask
6571 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6572 MapReduceValuesToIntTask<K,V> nextRight,
6573 IntFunction<? super V> transformer,
6574 int basis,
6575 IntBinaryOperator reducer) {
6576 super(m, p, b); this.nextRight = nextRight;
6577 this.transformer = transformer;
6578 this.basis = basis; this.reducer = reducer;
6579 }
6580 public final Integer getRawResult() { return result; }
6581 @SuppressWarnings("unchecked") public final void compute() {
6582 final IntFunction<? super V> transformer;
6583 final IntBinaryOperator reducer;
6584 if ((transformer = this.transformer) != null &&
6585 (reducer = this.reducer) != null) {
6586 int r = this.basis;
6587 for (int b; (b = preSplit()) > 0;)
6588 (rights = new MapReduceValuesToIntTask<K,V>
6589 (map, this, b, rights, transformer, r, reducer)).fork();
6590 V v;
6591 while ((v = advance()) != null)
6592 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
6593 result = r;
6594 CountedCompleter<?> c;
6595 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6596 MapReduceValuesToIntTask<K,V>
6597 t = (MapReduceValuesToIntTask<K,V>)c,
6598 s = t.rights;
6599 while (s != null) {
6600 t.result = reducer.applyAsInt(t.result, s.result);
6601 s = t.rights = s.nextRight;
6602 }
6603 }
6604 }
6605 }
6606 }
6607
6608 @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6609 extends Traverser<K,V,Integer> {
6610 final IntFunction<Map.Entry<K,V>> transformer;
6611 final IntBinaryOperator reducer;
6612 final int basis;
6613 int result;
6614 MapReduceEntriesToIntTask<K,V> rights, nextRight;
6615 MapReduceEntriesToIntTask
6616 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6617 MapReduceEntriesToIntTask<K,V> nextRight,
6618 IntFunction<Map.Entry<K,V>> transformer,
6619 int basis,
6620 IntBinaryOperator reducer) {
6621 super(m, p, b); this.nextRight = nextRight;
6622 this.transformer = transformer;
6623 this.basis = basis; this.reducer = reducer;
6624 }
6625 public final Integer getRawResult() { return result; }
6626 @SuppressWarnings("unchecked") public final void compute() {
6627 final IntFunction<Map.Entry<K,V>> transformer;
6628 final IntBinaryOperator reducer;
6629 if ((transformer = this.transformer) != null &&
6630 (reducer = this.reducer) != null) {
6631 int r = this.basis;
6632 for (int b; (b = preSplit()) > 0;)
6633 (rights = new MapReduceEntriesToIntTask<K,V>
6634 (map, this, b, rights, transformer, r, reducer)).fork();
6635 V v;
6636 while ((v = advance()) != null)
6637 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)nextKey,
6638 v)));
6639 result = r;
6640 CountedCompleter<?> c;
6641 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6642 MapReduceEntriesToIntTask<K,V>
6643 t = (MapReduceEntriesToIntTask<K,V>)c,
6644 s = t.rights;
6645 while (s != null) {
6646 t.result = reducer.applyAsInt(t.result, s.result);
6647 s = t.rights = s.nextRight;
6648 }
6649 }
6650 }
6651 }
6652 }
6653
6654 @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6655 extends Traverser<K,V,Integer> {
6656 final IntBiFunction<? super K, ? super V> transformer;
6657 final IntBinaryOperator reducer;
6658 final int basis;
6659 int result;
6660 MapReduceMappingsToIntTask<K,V> rights, nextRight;
6661 MapReduceMappingsToIntTask
6662 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6663 MapReduceMappingsToIntTask<K,V> nextRight,
6664 IntBiFunction<? super K, ? super V> transformer,
6665 int basis,
6666 IntBinaryOperator reducer) {
6667 super(m, p, b); this.nextRight = nextRight;
6668 this.transformer = transformer;
6669 this.basis = basis; this.reducer = reducer;
6670 }
6671 public final Integer getRawResult() { return result; }
6672 @SuppressWarnings("unchecked") public final void compute() {
6673 final IntBiFunction<? super K, ? super V> transformer;
6674 final IntBinaryOperator reducer;
6675 if ((transformer = this.transformer) != null &&
6676 (reducer = this.reducer) != null) {
6677 int r = this.basis;
6678 for (int b; (b = preSplit()) > 0;)
6679 (rights = new MapReduceMappingsToIntTask<K,V>
6680 (map, this, b, rights, transformer, r, reducer)).fork();
6681 V v;
6682 while ((v = advance()) != null)
6683 r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey, v));
6684 result = r;
6685 CountedCompleter<?> c;
6686 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6687 MapReduceMappingsToIntTask<K,V>
6688 t = (MapReduceMappingsToIntTask<K,V>)c,
6689 s = t.rights;
6690 while (s != null) {
6691 t.result = reducer.applyAsInt(t.result, s.result);
6692 s = t.rights = s.nextRight;
6693 }
6694 }
6695 }
6696 }
6697 }
6698
6699 // Unsafe mechanics
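    // These offsets are resolved once in the static initializer below and
    // are used for volatile reads and CAS updates of the corresponding
    // fields (sizeCtl, transferIndex, transferOrigin, baseCount, cellsBusy,
    // and the counter Cell's value).  ABASE and ASHIFT locate table slots:
    // the element at index i of a Node[] resides at ABASE + ((long)i << ASHIFT).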
6700 private static final sun.misc.Unsafe U;
6701 private static final long SIZECTL;
6702 private static final long TRANSFERINDEX;
6703 private static final long TRANSFERORIGIN;
6704 private static final long BASECOUNT;
6705 private static final long CELLSBUSY;
6706 private static final long CELLVALUE;
6707 private static final long ABASE;
6708 private static final int ASHIFT;
6709
6710 static {
6711 int ss;
6712 try {
6713 U = sun.misc.Unsafe.getUnsafe();
6714 Class<?> k = ConcurrentHashMap.class;
6715 SIZECTL = U.objectFieldOffset
6716 (k.getDeclaredField("sizeCtl"));
6717 TRANSFERINDEX = U.objectFieldOffset
6718 (k.getDeclaredField("transferIndex"));
6719 TRANSFERORIGIN = U.objectFieldOffset
6720 (k.getDeclaredField("transferOrigin"));
6721 BASECOUNT = U.objectFieldOffset
6722 (k.getDeclaredField("baseCount"));
6723 CELLSBUSY = U.objectFieldOffset
6724 (k.getDeclaredField("cellsBusy"));
6725 Class<?> ck = Cell.class;
6726 CELLVALUE = U.objectFieldOffset
6727 (ck.getDeclaredField("value"));
6728 Class<?> sc = Node[].class;
6729 ABASE = U.arrayBaseOffset(sc);
6730 ss = U.arrayIndexScale(sc);
6731 ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
6732 } catch (Exception e) {
6733 throw new Error(e);
6734 }
6735 if ((ss & (ss-1)) != 0)
6736 throw new Error("data type scale not a power of two");
6737 }
6738
6739 }