root/jsr166/jsr166/src/main/java/util/concurrent/ConcurrentHashMap.java
Revision: 1.154
Committed: Thu Dec 27 20:29:07 2012 UTC by jsr166
Branch: MAIN
Changes since 1.153: +2 -2 lines
Log Message:
whitespace

1 /*
2 * Written by Doug Lea with assistance from members of JCP JSR-166
3 * Expert Group and released to the public domain, as explained at
4 * http://creativecommons.org/publicdomain/zero/1.0/
5 */
6
7 package java.util.concurrent;
8 import java.util.concurrent.ForkJoinPool;
9 import java.util.concurrent.CountedCompleter;
10 import java.util.function.*;
11 import java.util.stream.Spliterator;
12 import java.util.stream.Stream;
13 import java.util.stream.Streams;
14
15 import java.util.Comparator;
16 import java.util.Arrays;
17 import java.util.Map;
18 import java.util.Set;
19 import java.util.Collection;
20 import java.util.AbstractMap;
21 import java.util.AbstractSet;
22 import java.util.AbstractCollection;
23 import java.util.Hashtable;
24 import java.util.HashMap;
25 import java.util.Iterator;
26 import java.util.Enumeration;
27 import java.util.ConcurrentModificationException;
28 import java.util.NoSuchElementException;
29 import java.util.concurrent.ConcurrentMap;
30 import java.util.concurrent.locks.AbstractQueuedSynchronizer;
31 import java.util.concurrent.atomic.AtomicInteger;
32 import java.util.concurrent.atomic.AtomicReference;
33 import java.io.Serializable;
34
35 /**
36 * A hash table supporting full concurrency of retrievals and
37 * high expected concurrency for updates. This class obeys the
38 * same functional specification as {@link java.util.Hashtable}, and
39 * includes versions of methods corresponding to each method of
40 * {@code Hashtable}. However, even though all operations are
41 * thread-safe, retrieval operations do <em>not</em> entail locking,
42 * and there is <em>not</em> any support for locking the entire table
43 * in a way that prevents all access. This class is fully
44 * interoperable with {@code Hashtable} in programs that rely on its
45 * thread safety but not on its synchronization details.
46 *
47 * <p>Retrieval operations (including {@code get}) generally do not
48 * block, so may overlap with update operations (including {@code put}
49 * and {@code remove}). Retrievals reflect the results of the most
50 * recently <em>completed</em> update operations holding upon their
51 * onset. (More formally, an update operation for a given key bears a
52 * <em>happens-before</em> relation with any (non-null) retrieval for
53 * that key reporting the updated value.) For aggregate operations
54 * such as {@code putAll} and {@code clear}, concurrent retrievals may
55 * reflect insertion or removal of only some entries. Similarly,
56 * Iterators and Enumerations return elements reflecting the state of
57 * the hash table at some point at or since the creation of the
58 * iterator/enumeration. They do <em>not</em> throw {@link
59 * ConcurrentModificationException}. However, iterators are designed
60 * to be used by only one thread at a time. Bear in mind that the
61 * results of aggregate status methods including {@code size}, {@code
62 * isEmpty}, and {@code containsValue} are typically useful only when
63 * a map is not undergoing concurrent updates in other threads.
64 * Otherwise the results of these methods reflect transient states
65 * that may be adequate for monitoring or estimation purposes, but not
66 * for program control.
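*
* <p>For example (an illustrative sketch; {@code Config} is a hypothetical
* value type): if one thread executes
* <pre> {@code
* Config c = new Config();
* c.timeoutMillis = 500;    // ordinary write
* map.put("current", c);    // publish under a key
* }</pre>
* then any thread whose {@code map.get("current")} returns that object is
* also guaranteed to observe {@code timeoutMillis == 500}.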
67 *
68 * <p>The table is dynamically expanded when there are too many
69 * collisions (i.e., keys that have distinct hash codes but fall into
70 * the same slot modulo the table size), with the expected average
71 * effect of maintaining roughly two bins per mapping (corresponding
72 * to a 0.75 load factor threshold for resizing). There may be much
73 * variance around this average as mappings are added and removed, but
74 * overall, this maintains a commonly accepted time/space tradeoff for
75 * hash tables. However, resizing this or any other kind of hash
76 * table may be a relatively slow operation. When possible, it is a
77 * good idea to provide a size estimate as an optional {@code
78 * initialCapacity} constructor argument. An additional optional
79 * {@code loadFactor} constructor argument provides a further means of
80 * customizing initial table capacity by specifying the table density
81 * to be used in calculating the amount of space to allocate for the
82 * given number of elements. Also, for compatibility with previous
83 * versions of this class, constructors may optionally specify an
84 * expected {@code concurrencyLevel} as an additional hint for
85 * internal sizing. Note that using many keys with exactly the same
86 * {@code hashCode()} is a sure way to slow down performance of any
87 * hash table.
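*
* <p>For example (an illustrative sketch), a map expected to hold roughly
* ten thousand mappings can be presized to avoid intermediate resizes:
* <pre> {@code
* ConcurrentHashMap<String,Long> scores = new ConcurrentHashMap<>(10000);
* }</pre>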
88 *
89 * <p>A {@link Set} projection of a ConcurrentHashMap may be created
90 * (using {@link #newKeySet()} or {@link #newKeySet(int)}), or viewed
91 * (using {@link #keySet(Object)}), when only keys are of interest, and the
92 * mapped values are (perhaps transiently) not used or all take the
93 * same mapping value.
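*
* <p>For example (an illustrative sketch), a concurrent set can be created
* and used directly through the {@code Set} interface:
* <pre> {@code
* Set<String> seen = ConcurrentHashMap.newKeySet();
* seen.add("first");                  // thread-safe Set operations
* boolean again = !seen.add("first");
* }</pre>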
94 *
95 * <p>A ConcurrentHashMap can be used as a scalable frequency map (a
96 * form of histogram or multiset) by using {@link
97 * java.util.concurrent.atomic.LongAdder} values and initializing via
98 * {@link #computeIfAbsent}. For example, to add a count to a {@code
99 * ConcurrentHashMap<String,LongAdder> freqs}, you can use {@code
100 * freqs.computeIfAbsent(key, k -> new LongAdder()).increment();}
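*
* <p>A slightly fuller sketch of this pattern ({@code word} stands for
* whatever key is being counted):
* <pre> {@code
* ConcurrentHashMap<String,LongAdder> freqs = new ConcurrentHashMap<>();
* // any number of threads may count concurrently:
* freqs.computeIfAbsent(word, k -> new LongAdder()).increment();
* // read back an approximate tally:
* LongAdder adder = freqs.get(word);
* long count = (adder == null) ? 0L : adder.sum();
* }</pre>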
101 *
102 * <p>This class and its views and iterators implement all of the
103 * <em>optional</em> methods of the {@link Map} and {@link Iterator}
104 * interfaces.
105 *
106 * <p>Like {@link Hashtable} but unlike {@link HashMap}, this class
107 * does <em>not</em> allow {@code null} to be used as a key or value.
108 *
109 * <p>ConcurrentHashMaps support a set of sequential and parallel
110 * bulk operations. (Parallel forms use the {@link
111 * ForkJoinPool#commonPool()}). Tasks that may be used in other
112 * contexts are available in class {@link ForkJoinTasks}. These
113 * operations are designed to be safely, and often sensibly, applied
114 * even with maps that are being concurrently updated by other
115 * threads; for example, when computing a snapshot summary of the
116 * values in a shared registry. There are three kinds of operation,
117 * each with four forms, accepting functions with Keys, Values,
118 * Entries, and (Key, Value) arguments and/or return values. Because
119 * the elements of a ConcurrentHashMap are not ordered in any
120 * particular way, and may be processed in different orders in
121 * different parallel executions, the correctness of supplied
122 * functions should not depend on any ordering, or on any other
123 * objects or values that may transiently change while computation is
124 * in progress; and except for forEach actions, should ideally be
125 * side-effect-free. (A brief illustrative sketch follows the list below.)
126 *
127 * <ul>
128 * <li> forEach: Perform a given action on each element.
129 * A variant form applies a given transformation on each element
130 * before performing the action.</li>
131 *
132 * <li> search: Return the first available non-null result of
133 * applying a given function on each element; skipping further
134 * search when a result is found.</li>
135 *
136 * <li> reduce: Accumulate each element. The supplied reduction
137 * function cannot rely on ordering (more formally, it should be
138 * both associative and commutative). There are five variants:
139 *
140 * <ul>
141 *
142 * <li> Plain reductions. (There is not a form of this method for
143 * (key, value) function arguments since there is no corresponding
144 * return type.)</li>
145 *
146 * <li> Mapped reductions that accumulate the results of a given
147 * function applied to each element.</li>
148 *
149 * <li> Reductions to scalar doubles, longs, and ints, using a
150 * given basis value.</li>
151 *
152 * </ul>
153 * </li>
154 * </ul>
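*
* <p>An illustrative sketch of the three kinds of operation (the method and
* helper names below are placeholders for the forEach/search/reduce
* families described above; see the method summaries for the exact forms
* this class provides):
* <pre> {@code
* map.forEachValue(v -> handle(v));               // action per value
* String hit = map.searchKeys(k ->                // first non-null result
*     k.startsWith("err") ? k : null);
* Long total = map.reduceValues((a, b) -> a + b); // plain reduction
* }</pre>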
155 *
156 * <p>The concurrency properties of bulk operations follow
157 * from those of ConcurrentHashMap: Any non-null result returned
158 * from {@code get(key)} and related access methods bears a
159 * happens-before relation with the associated insertion or
160 * update. The result of any bulk operation reflects the
161 * composition of these per-element relations (but is not
162 * necessarily atomic with respect to the map as a whole unless it
163 * is somehow known to be quiescent). Conversely, because keys
164 * and values in the map are never null, null serves as a reliable
165 * atomic indicator of the current lack of any result. To
166 * maintain this property, null serves as an implicit basis for
167 * all non-scalar reduction operations. For the double, long, and
168 * int versions, the basis should be one that, when combined with
169 * any other value, returns that other value (more formally, it
170 * should be the identity element for the reduction). Most common
171 * reductions have these properties; for example, computing a sum
172 * with basis 0 or a minimum with basis MAX_VALUE.
173 *
174 * <p>Search and transformation functions provided as arguments
175 * should similarly return null to indicate the lack of any result
176 * (in which case it is not used). In the case of mapped
177 * reductions, this also enables transformations to serve as
178 * filters, returning null (or, in the case of primitive
179 * specializations, the identity basis) if the element should not
180 * be combined. You can create compound transformations and
181 * filterings by composing them yourself under this "null means
182 * there is nothing there now" rule before using them in search or
183 * reduce operations.
184 *
185 * <p>Methods accepting and/or returning Entry arguments maintain
186 * key-value associations. They may be useful for example when
187 * finding the key for the greatest value. Note that "plain" Entry
188 * arguments can be supplied using {@code new
189 * AbstractMap.SimpleEntry(k,v)}.
190 *
191 * <p>Bulk operations may complete abruptly, throwing an
192 * exception encountered in the application of a supplied
193 * function. Bear in mind when handling such exceptions that other
194 * concurrently executing functions could also have thrown
195 * exceptions, or would have done so if the first exception had
196 * not occurred.
197 *
198 * <p>Speedups for parallel compared to sequential forms are common
199 * but not guaranteed. Parallel operations involving brief functions
200 * on small maps may execute more slowly than sequential forms if the
201 * underlying work to parallelize the computation is more expensive
202 * than the computation itself. Similarly, parallelization may not
203 * lead to much actual parallelism if all processors are busy
204 * performing unrelated tasks.
205 *
206 * <p>All arguments to all task methods must be non-null.
207 *
208 * <p>This class is a member of the
209 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
210 * Java Collections Framework</a>.
211 *
212 * @since 1.5
213 * @author Doug Lea
214 * @param <K> the type of keys maintained by this map
215 * @param <V> the type of mapped values
216 */
217 public class ConcurrentHashMap<K, V>
218 implements ConcurrentMap<K, V>, Serializable {
219 private static final long serialVersionUID = 7249069246763182397L;
220
221 /*
222 * Overview:
223 *
224 * The primary design goal of this hash table is to maintain
225 * concurrent readability (typically method get(), but also
226 * iterators and related methods) while minimizing update
227 * contention. Secondary goals are to keep space consumption about
228 * the same or better than java.util.HashMap, and to support high
229 * initial insertion rates on an empty table by many threads.
230 *
231 * Each key-value mapping is held in a Node. Because Node key
232 * fields can contain special values, they are defined using plain
233 * Object types (not type "K"). This leads to a lot of explicit
234 * casting (and many explicit warning suppressions to tell
235 * compilers not to complain about it). It also allows some of the
236 * public methods to be factored into a smaller number of internal
237 * methods (although sadly not so for the five variants of
238 * put-related operations). The validation-based approach
239 * explained below leads to a lot of code sprawl because
240 * retry-control precludes factoring into smaller methods.
241 *
242 * The table is lazily initialized to a power-of-two size upon the
243 * first insertion. Each bin in the table normally contains a
244 * list of Nodes (most often, the list has only zero or one Node).
245 * Table accesses require volatile/atomic reads, writes, and
246 * CASes. Because there is no other way to arrange this without
247 * adding further indirections, we use intrinsics
248 * (sun.misc.Unsafe) operations. The lists of nodes within bins
249 * are always accurately traversable under volatile reads, so long
250 * as lookups check hash code and non-nullness of value before
251 * checking key equality.
252 *
253 * We use the top (sign) bit of Node hash fields for control
254 * purposes -- it is available anyway because of addressing
255 * constraints. Nodes with negative hash fields are forwarding
256 * nodes to either TreeBins or resized tables. The lower 31 bits
257 * of each normal Node's hash field contain a transformation of
258 * the key's hash code.
259 *
260 * Insertion (via put or its variants) of the first node in an
261 * empty bin is performed by just CASing it to the bin. This is
262 * by far the most common case for put operations under most
263 * key/hash distributions. Other update operations (insert,
264 * delete, and replace) require locks. We do not want to waste
265 * the space required to associate a distinct lock object with
266 * each bin, so instead use the first node of a bin list itself as
267 * a lock. Locking support for these locks relies on builtin
268 * "synchronized" monitors.
269 *
270 * Using the first node of a list as a lock does not by itself
271 * suffice though: When a node is locked, any update must first
272 * validate that it is still the first node after locking it, and
273 * retry if not. Because new nodes are always appended to lists,
274 * once a node is first in a bin, it remains first until deleted
275 * or the bin becomes invalidated (upon resizing). However,
276 * operations that only conditionally update may inspect nodes
277 * until the point of update. This is a converse of sorts to the
278 * lazy locking technique described by Herlihy & Shavit.
279 *
280 * The main disadvantage of per-bin locks is that other update
281 * operations on other nodes in a bin list protected by the same
282 * lock can stall, for example when user equals() or mapping
283 * functions take a long time. However, statistically, under
284 * random hash codes, this is not a common problem. Ideally, the
285 * frequency of nodes in bins follows a Poisson distribution
286 * (http://en.wikipedia.org/wiki/Poisson_distribution) with a
287 * parameter of about 0.5 on average, given the resizing threshold
288 * of 0.75, although with a large variance because of resizing
289 * granularity. Ignoring variance, the expected occurrences of
290 * list size k are (exp(-0.5) * pow(0.5, k) / factorial(k)). The
291 * first values are:
292 *
293 * 0: 0.60653066
294 * 1: 0.30326533
295 * 2: 0.07581633
296 * 3: 0.01263606
297 * 4: 0.00157952
298 * 5: 0.00015795
299 * 6: 0.00001316
300 * 7: 0.00000094
301 * 8: 0.00000006
302 * more: less than 1 in ten million
303 *
304 * Lock contention probability for two threads accessing distinct
305 * elements is roughly 1 / (8 * #elements) under random hashes.
306 *
307 * Actual hash code distributions encountered in practice
308 * sometimes deviate significantly from uniform randomness. This
309 * includes the case when N > (1<<30), so some keys MUST collide.
310 * Similarly for dumb or hostile usages in which multiple keys are
311 * designed to have identical hash codes. Also, although we guard
312 * against the worst effects of this (see method spread), sets of
313 * hashes may differ only in bits that do not impact their bin
314 * index for a given power-of-two mask. So we use a secondary
315 * strategy that applies when the number of nodes in a bin exceeds
316 * a threshold, and at least one of the keys implements
317 * Comparable. These TreeBins use a balanced tree to hold nodes
318 * (a specialized form of red-black trees), bounding search time
319 * to O(log N). Each search step in a TreeBin is around twice as
320 * slow as in a regular list, but given that N cannot exceed
321 * (1<<64) (before running out of addresses) this bounds search
322 * steps, lock hold times, etc, to reasonable constants (roughly
323 * 100 nodes inspected per operation worst case) so long as keys
324 * are Comparable (which is very common -- String, Long, etc).
325 * TreeBin nodes (TreeNodes) also maintain the same "next"
326 * traversal pointers as regular nodes, so can be traversed in
327 * iterators in the same way.
328 *
329 * The table is resized when occupancy exceeds a percentage
330 * threshold (nominally, 0.75, but see below). Any thread
331 * noticing an overfull bin may assist in resizing after the
332 * initiating thread allocates and sets up the replacement
333 * array. However, rather than stalling, these other threads may
334 * proceed with insertions etc. The use of TreeBins shields us
335 * from the worst case effects of overfilling while resizes are in
336 * progress. Resizing proceeds by transferring bins, one by one,
337 * from the table to the next table. To enable concurrency, the
338 * next table must be (incrementally) prefilled with place-holders
339 * serving as reverse forwarders to the old table. Because we are
340 * using power-of-two expansion, the elements from each bin must
341 * either stay at same index, or move with a power of two
342 * offset. We eliminate unnecessary node creation by catching
343 * cases where old nodes can be reused because their next fields
344 * won't change. On average, only about one-sixth of them need
345 * cloning when a table doubles. The nodes they replace will be
346 * garbage collectable as soon as they are no longer referenced by
347 * any reader thread that may be in the midst of concurrently
348 * traversing the table. Upon transfer, the old table bin contains
349 * only a special forwarding node (with hash field "MOVED") that
350 * contains the next table as its key. On encountering a
351 * forwarding node, access and update operations restart, using
352 * the new table.
353 *
354 * Each bin transfer requires its bin lock, which can stall
355 * waiting for locks while resizing. However, because other
356 * threads can join in and help resize rather than contend for
357 * locks, average aggregate waits become shorter as resizing
358 * progresses. The transfer operation must also ensure that all
359 * accessible bins in both the old and new table are usable by any
360 * traversal. This is arranged by proceeding from the last bin
361 * (table.length - 1) up towards the first. Upon seeing a
362 * forwarding node, traversals (see class Traverser) arrange to
363 * move to the new table without revisiting nodes. However, to
364 * ensure that no intervening nodes are skipped, bin splitting can
365 * only begin after the associated reverse-forwarders are in
366 * place.
367 *
368 * The traversal scheme also applies to partial traversals of
369 * ranges of bins (via an alternate Traverser constructor)
370 * to support partitioned aggregate operations. Also, read-only
371 * operations give up if ever forwarded to a null table, which
372 * provides support for shutdown-style clearing (not currently
373 * implemented).
374 *
375 * Lazy table initialization minimizes footprint until first use,
376 * and also avoids resizings when the first operation is from a
377 * putAll, constructor with map argument, or deserialization.
378 * These cases attempt to override the initial capacity settings,
379 * but harmlessly fail to take effect in cases of races.
380 *
381 * The element count is maintained using a specialization of
382 * LongAdder. We need to incorporate a specialization rather than
383 * just use a LongAdder in order to access implicit
384 * contention-sensing that leads to creation of multiple
385 * Cells. The counter mechanics avoid contention on
386 * updates but can encounter cache thrashing if read too
387 * frequently during concurrent access. To avoid reading so often,
388 * resizing under contention is attempted only upon adding to a
389 * bin already holding two or more nodes. Under uniform hash
390 * distributions, the probability of this occurring at threshold
391 * is around 13%, meaning that only about 1 in 8 puts check
392 * threshold (and after resizing, many fewer do so). The bulk
393 * putAll operation further reduces contention by only committing
394 * count updates upon these size checks.
395 *
396 * Maintaining API and serialization compatibility with previous
397 * versions of this class introduces several oddities. Mainly: We
398 * leave untouched but unused constructor arguments referring to
399 * concurrencyLevel. We accept a loadFactor constructor argument,
400 * but apply it only to initial table capacity (which is the only
401 * time that we can guarantee to honor it.) We also declare an
402 * unused "Segment" class that is instantiated in minimal form
403 * only when serializing.
404 */
405
406 /* ---------------- Constants -------------- */
407
408 /**
409 * The largest possible table capacity. This value must be
410 * exactly 1<<30 to stay within Java array allocation and indexing
411 * bounds for power of two table sizes, and is further required
412 * because the top two bits of 32bit hash fields are used for
413 * control purposes.
414 */
415 private static final int MAXIMUM_CAPACITY = 1 << 30;
416
417 /**
418 * The default initial table capacity. Must be a power of 2
419 * (i.e., at least 1) and at most MAXIMUM_CAPACITY.
420 */
421 private static final int DEFAULT_CAPACITY = 16;
422
423 /**
424 * The largest possible (non-power of two) array size.
425 * Needed by toArray and related methods.
426 */
427 static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
428
429 /**
430 * The default concurrency level for this table. Unused but
431 * defined for compatibility with previous versions of this class.
432 */
433 private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
434
435 /**
436 * The load factor for this table. Overrides of this value in
437 * constructors affect only the initial table capacity. The
438 * actual floating point value isn't normally used -- it is
439 * simpler to use expressions such as {@code n - (n >>> 2)} for
440 * the associated resizing threshold.
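* (For the default capacity of 16, that threshold is 12.)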
441 */
442 private static final float LOAD_FACTOR = 0.75f;
443
444 /**
445 * The bin count threshold for using a tree rather than list for a
446 * bin. The value reflects the approximate break-even point for
447 * using tree-based operations.
448 */
449 private static final int TREE_THRESHOLD = 8;
450
451 /**
452 * Minimum number of rebinnings per transfer step. Ranges are
453 * subdivided to allow multiple resizer threads. This value
454 * serves as a lower bound to avoid resizers encountering
455 * excessive memory contention. The value should be at least
456 * DEFAULT_CAPACITY.
457 */
458 private static final int MIN_TRANSFER_STRIDE = 16;
459
460 /*
461 * Encodings for Node hash fields. See above for explanation.
462 */
463 static final int MOVED = 0x80000000; // hash field for forwarding nodes
464 static final int HASH_BITS = 0x7fffffff; // usable bits of normal node hash
465
466 /** Number of CPUS, to place bounds on some sizings */
467 static final int NCPU = Runtime.getRuntime().availableProcessors();
468
469 /* ---------------- Counters -------------- */
470
471 // Adapted from LongAdder and Striped64.
472 // See their internal docs for explanation.
473
474 // A padded cell for distributing counts
475 static final class Cell {
476 volatile long p0, p1, p2, p3, p4, p5, p6;
477 volatile long value;
478 volatile long q0, q1, q2, q3, q4, q5, q6;
479 Cell(long x) { value = x; }
480 }
481
482 /**
483 * Holder for the thread-local hash code determining which
484 * Cell to use. The code is initialized via the
485 * cellHashCodeGenerator, but may be moved upon collisions.
486 */
487 static final class CellHashCode {
488 int code;
489 }
490
491 /**
492 * Generates initial value for per-thread CellHashCodes
493 */
494 static final AtomicInteger cellHashCodeGenerator = new AtomicInteger();
495
496 /**
497 * Increment for cellHashCodeGenerator. See class ThreadLocal
498 * for explanation.
499 */
500 static final int SEED_INCREMENT = 0x61c88647;
501
502 /**
503 * Per-thread counter hash codes. Shared across all instances
504 */
505 static final ThreadLocal<CellHashCode> threadCellHashCode =
506 new ThreadLocal<CellHashCode>();
507
508 /* ---------------- Fields -------------- */
509
510 /**
511 * The array of bins. Lazily initialized upon first insertion.
512 * Size is always a power of two. Accessed directly by iterators.
513 */
514 transient volatile Node<V>[] table;
515
516 /**
517 * The next table to use; non-null only while resizing.
518 */
519 private transient volatile Node<V>[] nextTable;
520
521 /**
522 * Base counter value, used mainly when there is no contention,
523 * but also as a fallback during table initialization
524 * races. Updated via CAS.
525 */
526 private transient volatile long baseCount;
527
528 /**
529 * Table initialization and resizing control. When negative, the
530 * table is being initialized or resized: -1 for initialization,
531 * else -(1 + the number of active resizing threads). Otherwise,
532 * when table is null, holds the initial table size to use upon
533 * creation, or 0 for default. After initialization, holds the
534 * next element count value upon which to resize the table.
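* (For example, after the default 16-bin table is initialized, this holds
* 12, i.e. 16 - (16 >>> 2); during initialization it is -1.)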
535 */
536 private transient volatile int sizeCtl;
537
538 /**
539 * The next table index (plus one) to split while resizing.
540 */
541 private transient volatile int transferIndex;
542
543 /**
544 * The least available table index to split while resizing.
545 */
546 private transient volatile int transferOrigin;
547
548 /**
549 * Spinlock (locked via CAS) used when resizing and/or creating Cells.
550 */
551 private transient volatile int cellsBusy;
552
553 /**
554 * Table of counter cells. When non-null, size is a power of 2.
555 */
556 private transient volatile Cell[] counterCells;
557
558 // views
559 private transient KeySetView<K,V> keySet;
560 private transient ValuesView<K,V> values;
561 private transient EntrySetView<K,V> entrySet;
562
563 /** For serialization compatibility. Null unless serialized; see below */
564 private Segment<K,V>[] segments;
565
566 /* ---------------- Table element access -------------- */
567
568 /*
569 * Volatile access methods are used for table elements as well as
570 * elements of in-progress next table while resizing. Uses are
571 * null checked by callers, and implicitly bounds-checked, relying
572 * on the invariants that tab arrays have non-zero size, and all
573 * indices are masked with (tab.length - 1) which is never
574 * negative and always less than length. Note that, to be correct
575 * wrt arbitrary concurrency errors by users, bounds checks must
576 * operate on local variables, which accounts for some odd-looking
577 * inline assignments below.
578 */
579
580 @SuppressWarnings("unchecked") static final <V> Node<V> tabAt
581 (Node<V>[] tab, int i) { // used by Traverser
582 return (Node<V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
583 }
584
585 private static final <V> boolean casTabAt
586 (Node<V>[] tab, int i, Node<V> c, Node<V> v) {
587 return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
588 }
589
590 private static final <V> void setTabAt
591 (Node<V>[] tab, int i, Node<V> v) {
592 U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
593 }
594
595 /* ---------------- Nodes -------------- */
596
597 /**
598 * Key-value entry. Note that this is never exported out as a
599 * user-visible Map.Entry (see MapEntry below). Nodes with a hash
600 * field of MOVED are special, and do not contain user keys or
601 * values. Otherwise, keys are never null, and null val fields
602 * indicate that a node is in the process of being deleted or
603 * created. For purposes of read-only access, a key may be read
604 * before a val, but can only be used after checking val to be
605 * non-null.
606 */
607 static class Node<V> {
608 final int hash;
609 final Object key;
610 volatile V val;
611 volatile Node<V> next;
612
613 Node(int hash, Object key, V val, Node<V> next) {
614 this.hash = hash;
615 this.key = key;
616 this.val = val;
617 this.next = next;
618 }
619 }
620
621 /* ---------------- TreeBins -------------- */
622
623 /**
624 * Nodes for use in TreeBins
625 */
626 static final class TreeNode<V> extends Node<V> {
627 TreeNode<V> parent; // red-black tree links
628 TreeNode<V> left;
629 TreeNode<V> right;
630 TreeNode<V> prev; // needed to unlink next upon deletion
631 boolean red;
632
633 TreeNode(int hash, Object key, V val, Node<V> next, TreeNode<V> parent) {
634 super(hash, key, val, next);
635 this.parent = parent;
636 }
637 }
638
639 /**
640 * A specialized form of red-black tree for use in bins
641 * whose size exceeds a threshold.
642 *
643 * TreeBins use a special form of comparison for search and
644 * related operations (which is the main reason we cannot use
645 * existing collections such as TreeMaps). TreeBins contain
646 * Comparable elements, but may contain others, as well as
647 * elements that are Comparable but not necessarily Comparable<T>
648 * for the same T, so we cannot invoke compareTo among them. To
649 * handle this, the tree is ordered primarily by hash value, then
650 * by getClass().getName() order, and then by Comparator order
651 * among elements of the same class. On lookup at a node, if
652 * elements are not comparable or compare as 0, both left and
653 * right children may need to be searched in the case of tied hash
654 * values. (This corresponds to the full list search that would be
655 * necessary if all elements were non-Comparable and had tied
656 * hashes.) The red-black balancing code is updated from
657 * pre-jdk-collections
658 * (http://gee.cs.oswego.edu/dl/classes/collections/RBCell.java)
659 * based in turn on Cormen, Leiserson, and Rivest "Introduction to
660 * Algorithms" (CLR).
661 *
662 * TreeBins also maintain a separate locking discipline from
663 * regular bins. Because they are forwarded via special MOVED
664 * nodes at bin heads (which can never change once established),
665 * we cannot use those nodes as locks. Instead, TreeBin
666 * extends AbstractQueuedSynchronizer to support a simple form of
667 * read-write lock. For update operations and table validation,
668 * the exclusive form of lock behaves in the same way as bin-head
669 * locks. However, lookups use shared read-lock mechanics to allow
670 * multiple readers in the absence of writers. Additionally,
671 * these lookups do not ever block: While the lock is not
672 * available, they proceed along the slow traversal path (via
673 * next-pointers) until the lock becomes available or the list is
674 * exhausted, whichever comes first. (These cases are not fast,
675 * but maximize aggregate expected throughput.) The AQS mechanics
676 * for doing this are straightforward. The lock state is held as
677 * AQS getState(). Read counts are negative; the write count (1)
678 * is positive. There are no signalling preferences among readers
679 * and writers. Since we don't need to export full Lock API, we
680 * just override the minimal AQS methods and use them directly.
681 */
682 static final class TreeBin<V> extends AbstractQueuedSynchronizer {
683 private static final long serialVersionUID = 2249069246763182397L;
684 transient TreeNode<V> root; // root of tree
685 transient TreeNode<V> first; // head of next-pointer list
686
687 /* AQS overrides */
688 public final boolean isHeldExclusively() { return getState() > 0; }
689 public final boolean tryAcquire(int ignore) {
690 if (compareAndSetState(0, 1)) {
691 setExclusiveOwnerThread(Thread.currentThread());
692 return true;
693 }
694 return false;
695 }
696 public final boolean tryRelease(int ignore) {
697 setExclusiveOwnerThread(null);
698 setState(0);
699 return true;
700 }
701 public final int tryAcquireShared(int ignore) {
702 for (int c;;) {
703 if ((c = getState()) > 0)
704 return -1;
705 if (compareAndSetState(c, c - 1))
706 return 1;
707 }
708 }
709 public final boolean tryReleaseShared(int ignore) {
710 int c;
711 do {} while (!compareAndSetState(c = getState(), c + 1));
712 return c == -1;
713 }
714
715 /** From CLR */
716 private void rotateLeft(TreeNode<V> p) {
717 if (p != null) {
718 TreeNode<V> r = p.right, pp, rl;
719 if ((rl = p.right = r.left) != null)
720 rl.parent = p;
721 if ((pp = r.parent = p.parent) == null)
722 root = r;
723 else if (pp.left == p)
724 pp.left = r;
725 else
726 pp.right = r;
727 r.left = p;
728 p.parent = r;
729 }
730 }
731
732 /** From CLR */
733 private void rotateRight(TreeNode<V> p) {
734 if (p != null) {
735 TreeNode<V> l = p.left, pp, lr;
736 if ((lr = p.left = l.right) != null)
737 lr.parent = p;
738 if ((pp = l.parent = p.parent) == null)
739 root = l;
740 else if (pp.right == p)
741 pp.right = l;
742 else
743 pp.left = l;
744 l.right = p;
745 p.parent = l;
746 }
747 }
748
749 /**
750 * Returns the TreeNode (or null if not found) for the given key
751 * starting at given root.
752 */
753 @SuppressWarnings("unchecked") final TreeNode<V> getTreeNode
754 (int h, Object k, TreeNode<V> p) {
755 Class<?> c = k.getClass();
756 while (p != null) {
757 int dir, ph; Object pk; Class<?> pc;
758 if ((ph = p.hash) == h) {
759 if ((pk = p.key) == k || k.equals(pk))
760 return p;
761 if (c != (pc = pk.getClass()) ||
762 !(k instanceof Comparable) ||
763 (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
764 if ((dir = (c == pc) ? 0 :
765 c.getName().compareTo(pc.getName())) == 0) {
766 TreeNode<V> r = null, pl, pr; // check both sides
767 if ((pr = p.right) != null && h >= pr.hash &&
768 (r = getTreeNode(h, k, pr)) != null)
769 return r;
770 else if ((pl = p.left) != null && h <= pl.hash)
771 dir = -1;
772 else // nothing there
773 return null;
774 }
775 }
776 }
777 else
778 dir = (h < ph) ? -1 : 1;
779 p = (dir > 0) ? p.right : p.left;
780 }
781 return null;
782 }
783
784 /**
785 * Wrapper for getTreeNode used by CHM.get. Tries to obtain
786 * read-lock to call getTreeNode, but during failure to get
787 * lock, searches along next links.
788 */
789 final V getValue(int h, Object k) {
790 Node<V> r = null;
791 int c = getState(); // Must read lock state first
792 for (Node<V> e = first; e != null; e = e.next) {
793 if (c <= 0 && compareAndSetState(c, c - 1)) {
794 try {
795 r = getTreeNode(h, k, root);
796 } finally {
797 releaseShared(0);
798 }
799 break;
800 }
801 else if (e.hash == h && k.equals(e.key)) {
802 r = e;
803 break;
804 }
805 else
806 c = getState();
807 }
808 return r == null ? null : r.val;
809 }
810
811 /**
812 * Finds or adds a node.
813 * @return null if added
814 */
815 @SuppressWarnings("unchecked") final TreeNode<V> putTreeNode
816 (int h, Object k, V v) {
817 Class<?> c = k.getClass();
818 TreeNode<V> pp = root, p = null;
819 int dir = 0;
820 while (pp != null) { // find existing node or leaf to insert at
821 int ph; Object pk; Class<?> pc;
822 p = pp;
823 if ((ph = p.hash) == h) {
824 if ((pk = p.key) == k || k.equals(pk))
825 return p;
826 if (c != (pc = pk.getClass()) ||
827 !(k instanceof Comparable) ||
828 (dir = ((Comparable)k).compareTo((Comparable)pk)) == 0) {
829 TreeNode<V> s = null, r = null, pr;
830 if ((dir = (c == pc) ? 0 :
831 c.getName().compareTo(pc.getName())) == 0) {
832 if ((pr = p.right) != null && h >= pr.hash &&
833 (r = getTreeNode(h, k, pr)) != null)
834 return r;
835 else // continue left
836 dir = -1;
837 }
838 else if ((pr = p.right) != null && h >= pr.hash)
839 s = pr;
840 if (s != null && (r = getTreeNode(h, k, s)) != null)
841 return r;
842 }
843 }
844 else
845 dir = (h < ph) ? -1 : 1;
846 pp = (dir > 0) ? p.right : p.left;
847 }
848
849 TreeNode<V> f = first;
850 TreeNode<V> x = first = new TreeNode<V>(h, k, v, f, p);
851 if (p == null)
852 root = x;
853 else { // attach and rebalance; adapted from CLR
854 TreeNode<V> xp, xpp;
855 if (f != null)
856 f.prev = x;
857 if (dir <= 0)
858 p.left = x;
859 else
860 p.right = x;
861 x.red = true;
862 while (x != null && (xp = x.parent) != null && xp.red &&
863 (xpp = xp.parent) != null) {
864 TreeNode<V> xppl = xpp.left;
865 if (xp == xppl) {
866 TreeNode<V> y = xpp.right;
867 if (y != null && y.red) {
868 y.red = false;
869 xp.red = false;
870 xpp.red = true;
871 x = xpp;
872 }
873 else {
874 if (x == xp.right) {
875 rotateLeft(x = xp);
876 xpp = (xp = x.parent) == null ? null : xp.parent;
877 }
878 if (xp != null) {
879 xp.red = false;
880 if (xpp != null) {
881 xpp.red = true;
882 rotateRight(xpp);
883 }
884 }
885 }
886 }
887 else {
888 TreeNode<V> y = xppl;
889 if (y != null && y.red) {
890 y.red = false;
891 xp.red = false;
892 xpp.red = true;
893 x = xpp;
894 }
895 else {
896 if (x == xp.left) {
897 rotateRight(x = xp);
898 xpp = (xp = x.parent) == null ? null : xp.parent;
899 }
900 if (xp != null) {
901 xp.red = false;
902 if (xpp != null) {
903 xpp.red = true;
904 rotateLeft(xpp);
905 }
906 }
907 }
908 }
909 }
910 TreeNode<V> r = root;
911 if (r != null && r.red)
912 r.red = false;
913 }
914 return null;
915 }
916
917 /**
918 * Removes the given node, which must be present before this
919 * call. This is messier than typical red-black deletion code
920 * because we cannot swap the contents of an interior node
921 * with a leaf successor that is pinned by "next" pointers
922 * that are accessible independently of lock. So instead we
923 * swap the tree linkages.
924 */
925 final void deleteTreeNode(TreeNode<V> p) {
926 TreeNode<V> next = (TreeNode<V>)p.next; // unlink traversal pointers
927 TreeNode<V> pred = p.prev;
928 if (pred == null)
929 first = next;
930 else
931 pred.next = next;
932 if (next != null)
933 next.prev = pred;
934 TreeNode<V> replacement;
935 TreeNode<V> pl = p.left;
936 TreeNode<V> pr = p.right;
937 if (pl != null && pr != null) {
938 TreeNode<V> s = pr, sl;
939 while ((sl = s.left) != null) // find successor
940 s = sl;
941 boolean c = s.red; s.red = p.red; p.red = c; // swap colors
942 TreeNode<V> sr = s.right;
943 TreeNode<V> pp = p.parent;
944 if (s == pr) { // p was s's direct parent
945 p.parent = s;
946 s.right = p;
947 }
948 else {
949 TreeNode<V> sp = s.parent;
950 if ((p.parent = sp) != null) {
951 if (s == sp.left)
952 sp.left = p;
953 else
954 sp.right = p;
955 }
956 if ((s.right = pr) != null)
957 pr.parent = s;
958 }
959 p.left = null;
960 if ((p.right = sr) != null)
961 sr.parent = p;
962 if ((s.left = pl) != null)
963 pl.parent = s;
964 if ((s.parent = pp) == null)
965 root = s;
966 else if (p == pp.left)
967 pp.left = s;
968 else
969 pp.right = s;
970 replacement = sr;
971 }
972 else
973 replacement = (pl != null) ? pl : pr;
974 TreeNode<V> pp = p.parent;
975 if (replacement == null) {
976 if (pp == null) {
977 root = null;
978 return;
979 }
980 replacement = p;
981 }
982 else {
983 replacement.parent = pp;
984 if (pp == null)
985 root = replacement;
986 else if (p == pp.left)
987 pp.left = replacement;
988 else
989 pp.right = replacement;
990 p.left = p.right = p.parent = null;
991 }
992 if (!p.red) { // rebalance, from CLR
993 TreeNode<V> x = replacement;
994 while (x != null) {
995 TreeNode<V> xp, xpl;
996 if (x.red || (xp = x.parent) == null) {
997 x.red = false;
998 break;
999 }
1000 if (x == (xpl = xp.left)) {
1001 TreeNode<V> sib = xp.right;
1002 if (sib != null && sib.red) {
1003 sib.red = false;
1004 xp.red = true;
1005 rotateLeft(xp);
1006 sib = (xp = x.parent) == null ? null : xp.right;
1007 }
1008 if (sib == null)
1009 x = xp;
1010 else {
1011 TreeNode<V> sl = sib.left, sr = sib.right;
1012 if ((sr == null || !sr.red) &&
1013 (sl == null || !sl.red)) {
1014 sib.red = true;
1015 x = xp;
1016 }
1017 else {
1018 if (sr == null || !sr.red) {
1019 if (sl != null)
1020 sl.red = false;
1021 sib.red = true;
1022 rotateRight(sib);
1023 sib = (xp = x.parent) == null ?
1024 null : xp.right;
1025 }
1026 if (sib != null) {
1027 sib.red = (xp == null) ? false : xp.red;
1028 if ((sr = sib.right) != null)
1029 sr.red = false;
1030 }
1031 if (xp != null) {
1032 xp.red = false;
1033 rotateLeft(xp);
1034 }
1035 x = root;
1036 }
1037 }
1038 }
1039 else { // symmetric
1040 TreeNode<V> sib = xpl;
1041 if (sib != null && sib.red) {
1042 sib.red = false;
1043 xp.red = true;
1044 rotateRight(xp);
1045 sib = (xp = x.parent) == null ? null : xp.left;
1046 }
1047 if (sib == null)
1048 x = xp;
1049 else {
1050 TreeNode<V> sl = sib.left, sr = sib.right;
1051 if ((sl == null || !sl.red) &&
1052 (sr == null || !sr.red)) {
1053 sib.red = true;
1054 x = xp;
1055 }
1056 else {
1057 if (sl == null || !sl.red) {
1058 if (sr != null)
1059 sr.red = false;
1060 sib.red = true;
1061 rotateLeft(sib);
1062 sib = (xp = x.parent) == null ?
1063 null : xp.left;
1064 }
1065 if (sib != null) {
1066 sib.red = (xp == null) ? false : xp.red;
1067 if ((sl = sib.left) != null)
1068 sl.red = false;
1069 }
1070 if (xp != null) {
1071 xp.red = false;
1072 rotateRight(xp);
1073 }
1074 x = root;
1075 }
1076 }
1077 }
1078 }
1079 }
1080 if (p == replacement && (pp = p.parent) != null) {
1081 if (p == pp.left) // detach pointers
1082 pp.left = null;
1083 else if (p == pp.right)
1084 pp.right = null;
1085 p.parent = null;
1086 }
1087 }
1088 }
1089
1090 /* ---------------- Collision reduction methods -------------- */
1091
1092 /**
1093 * Spreads higher bits to lower, and also forces top bit to 0.
1094 * Because the table uses power-of-two masking, sets of hashes
1095 * that vary only in bits above the current mask will always
1096 * collide. (Among known examples are sets of Float keys holding
1097 * consecutive whole numbers in small tables.) To counter this,
1098 * we apply a transform that spreads the impact of higher bits
1099 * downward. There is a tradeoff between speed, utility, and
1100 * quality of bit-spreading. Because many common sets of hashes
1101 * are already reasonably distributed across bits (so don't benefit
1102 * from spreading), and because we use trees to handle large sets
1103 * of collisions in bins, we don't need excessively high quality.
1104 */
1105 private static final int spread(int h) {
1106 h ^= (h >>> 18) ^ (h >>> 12);
1107 return (h ^ (h >>> 10)) & HASH_BITS;
1108 }
1109
1110 /**
1111 * Replaces a list bin with a tree bin if key is comparable. Call
1112 * only when locked.
1113 */
1114 private final void replaceWithTreeBin(Node<V>[] tab, int index, Object key) {
1115 if (key instanceof Comparable) {
1116 TreeBin<V> t = new TreeBin<V>();
1117 for (Node<V> e = tabAt(tab, index); e != null; e = e.next)
1118 t.putTreeNode(e.hash, e.key, e.val);
1119 setTabAt(tab, index, new Node<V>(MOVED, t, null, null));
1120 }
1121 }
1122
1123 /* ---------------- Internal access and update methods -------------- */
1124
1125 /** Implementation for get and containsKey */
1126 @SuppressWarnings("unchecked") private final V internalGet(Object k) {
1127 int h = spread(k.hashCode());
1128 retry: for (Node<V>[] tab = table; tab != null;) {
1129 Node<V> e; Object ek; V ev; int eh; // locals to read fields once
1130 for (e = tabAt(tab, (tab.length - 1) & h); e != null; e = e.next) {
1131 if ((eh = e.hash) < 0) {
1132 if ((ek = e.key) instanceof TreeBin) // search TreeBin
1133 return ((TreeBin<V>)ek).getValue(h, k);
1134 else { // restart with new table
1135 tab = (Node<V>[])ek;
1136 continue retry;
1137 }
1138 }
1139 else if (eh == h && (ev = e.val) != null &&
1140 ((ek = e.key) == k || k.equals(ek)))
1141 return ev;
1142 }
1143 break;
1144 }
1145 return null;
1146 }
1147
1148 /**
1149 * Implementation for the four public remove/replace methods:
1150 * Replaces node value with v, conditional upon match of cv if
1151 * non-null. If resulting value is null, delete.
1152 */
1153 @SuppressWarnings("unchecked") private final V internalReplace
1154 (Object k, V v, Object cv) {
1155 int h = spread(k.hashCode());
1156 V oldVal = null;
1157 for (Node<V>[] tab = table;;) {
1158 Node<V> f; int i, fh; Object fk;
1159 if (tab == null ||
1160 (f = tabAt(tab, i = (tab.length - 1) & h)) == null)
1161 break;
1162 else if ((fh = f.hash) < 0) {
1163 if ((fk = f.key) instanceof TreeBin) {
1164 TreeBin<V> t = (TreeBin<V>)fk;
1165 boolean validated = false;
1166 boolean deleted = false;
1167 t.acquire(0);
1168 try {
1169 if (tabAt(tab, i) == f) {
1170 validated = true;
1171 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1172 if (p != null) {
1173 V pv = p.val;
1174 if (cv == null || cv == pv || cv.equals(pv)) {
1175 oldVal = pv;
1176 if ((p.val = v) == null) {
1177 deleted = true;
1178 t.deleteTreeNode(p);
1179 }
1180 }
1181 }
1182 }
1183 } finally {
1184 t.release(0);
1185 }
1186 if (validated) {
1187 if (deleted)
1188 addCount(-1L, -1);
1189 break;
1190 }
1191 }
1192 else
1193 tab = (Node<V>[])fk;
1194 }
1195 else if (fh != h && f.next == null) // precheck
1196 break; // rules out possible existence
1197 else {
1198 boolean validated = false;
1199 boolean deleted = false;
1200 synchronized (f) {
1201 if (tabAt(tab, i) == f) {
1202 validated = true;
1203 for (Node<V> e = f, pred = null;;) {
1204 Object ek; V ev;
1205 if (e.hash == h &&
1206 ((ev = e.val) != null) &&
1207 ((ek = e.key) == k || k.equals(ek))) {
1208 if (cv == null || cv == ev || cv.equals(ev)) {
1209 oldVal = ev;
1210 if ((e.val = v) == null) {
1211 deleted = true;
1212 Node<V> en = e.next;
1213 if (pred != null)
1214 pred.next = en;
1215 else
1216 setTabAt(tab, i, en);
1217 }
1218 }
1219 break;
1220 }
1221 pred = e;
1222 if ((e = e.next) == null)
1223 break;
1224 }
1225 }
1226 }
1227 if (validated) {
1228 if (deleted)
1229 addCount(-1L, -1);
1230 break;
1231 }
1232 }
1233 }
1234 return oldVal;
1235 }
1236
1237 /*
1238 * Internal versions of insertion methods
1239 * All have the same basic structure as the first (internalPut):
1240 * 1. If table uninitialized, create
1241 * 2. If bin empty, try to CAS new node
1242 * 3. If bin stale, use new table
1243 * 4. if bin converted to TreeBin, validate and relay to TreeBin methods
1244 * 5. Lock and validate; if valid, scan and add or update
1245 *
1246 * The putAll method differs mainly in attempting to pre-allocate
1247 * enough table space, and also more lazily performs count updates
1248 * and checks.
1249 *
1250 * Most of the function-accepting methods can't be factored nicely
1251 * because they require different functional forms, so instead
1252 * sprawl out similar mechanics.
1253 */
1254
1255 /** Implementation for put and putIfAbsent */
1256 @SuppressWarnings("unchecked") private final V internalPut
1257 (K k, V v, boolean onlyIfAbsent) {
1258 if (k == null || v == null) throw new NullPointerException();
1259 int h = spread(k.hashCode());
1260 int len = 0;
1261 for (Node<V>[] tab = table;;) {
1262 int i, fh; Node<V> f; Object fk; V fv;
1263 if (tab == null)
1264 tab = initTable();
1265 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1266 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null)))
1267 break; // no lock when adding to empty bin
1268 }
1269 else if ((fh = f.hash) < 0) {
1270 if ((fk = f.key) instanceof TreeBin) {
1271 TreeBin<V> t = (TreeBin<V>)fk;
1272 V oldVal = null;
1273 t.acquire(0);
1274 try {
1275 if (tabAt(tab, i) == f) {
1276 len = 2;
1277 TreeNode<V> p = t.putTreeNode(h, k, v);
1278 if (p != null) {
1279 oldVal = p.val;
1280 if (!onlyIfAbsent)
1281 p.val = v;
1282 }
1283 }
1284 } finally {
1285 t.release(0);
1286 }
1287 if (len != 0) {
1288 if (oldVal != null)
1289 return oldVal;
1290 break;
1291 }
1292 }
1293 else
1294 tab = (Node<V>[])fk;
1295 }
1296 else if (onlyIfAbsent && fh == h && (fv = f.val) != null &&
1297 ((fk = f.key) == k || k.equals(fk))) // peek while nearby
1298 return fv;
1299 else {
1300 V oldVal = null;
1301 synchronized (f) {
1302 if (tabAt(tab, i) == f) {
1303 len = 1;
1304 for (Node<V> e = f;; ++len) {
1305 Object ek; V ev;
1306 if (e.hash == h &&
1307 (ev = e.val) != null &&
1308 ((ek = e.key) == k || k.equals(ek))) {
1309 oldVal = ev;
1310 if (!onlyIfAbsent)
1311 e.val = v;
1312 break;
1313 }
1314 Node<V> last = e;
1315 if ((e = e.next) == null) {
1316 last.next = new Node<V>(h, k, v, null);
1317 if (len >= TREE_THRESHOLD)
1318 replaceWithTreeBin(tab, i, k);
1319 break;
1320 }
1321 }
1322 }
1323 }
1324 if (len != 0) {
1325 if (oldVal != null)
1326 return oldVal;
1327 break;
1328 }
1329 }
1330 }
1331 addCount(1L, len);
1332 return null;
1333 }
1334
1335 /** Implementation for computeIfAbsent */
1336 @SuppressWarnings("unchecked") private final V internalComputeIfAbsent
1337 (K k, Function<? super K, ? extends V> mf) {
1338 if (k == null || mf == null)
1339 throw new NullPointerException();
1340 int h = spread(k.hashCode());
1341 V val = null;
1342 int len = 0;
1343 for (Node<V>[] tab = table;;) {
1344 Node<V> f; int i; Object fk;
1345 if (tab == null)
1346 tab = initTable();
1347 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1348 Node<V> node = new Node<V>(h, k, null, null);
1349 synchronized (node) {
1350 if (casTabAt(tab, i, null, node)) {
1351 len = 1;
1352 try {
1353 if ((val = mf.apply(k)) != null)
1354 node.val = val;
1355 } finally {
1356 if (val == null)
1357 setTabAt(tab, i, null);
1358 }
1359 }
1360 }
1361 if (len != 0)
1362 break;
1363 }
1364 else if (f.hash < 0) {
1365 if ((fk = f.key) instanceof TreeBin) {
1366 TreeBin<V> t = (TreeBin<V>)fk;
1367 boolean added = false;
1368 t.acquire(0);
1369 try {
1370 if (tabAt(tab, i) == f) {
1371 len = 1;
1372 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1373 if (p != null)
1374 val = p.val;
1375 else if ((val = mf.apply(k)) != null) {
1376 added = true;
1377 len = 2;
1378 t.putTreeNode(h, k, val);
1379 }
1380 }
1381 } finally {
1382 t.release(0);
1383 }
1384 if (len != 0) {
1385 if (!added)
1386 return val;
1387 break;
1388 }
1389 }
1390 else
1391 tab = (Node<V>[])fk;
1392 }
1393 else {
1394 for (Node<V> e = f; e != null; e = e.next) { // prescan
1395 Object ek; V ev;
1396 if (e.hash == h && (ev = e.val) != null &&
1397 ((ek = e.key) == k || k.equals(ek)))
1398 return ev;
1399 }
1400 boolean added = false;
1401 synchronized (f) {
1402 if (tabAt(tab, i) == f) {
1403 len = 1;
1404 for (Node<V> e = f;; ++len) {
1405 Object ek; V ev;
1406 if (e.hash == h &&
1407 (ev = e.val) != null &&
1408 ((ek = e.key) == k || k.equals(ek))) {
1409 val = ev;
1410 break;
1411 }
1412 Node<V> last = e;
1413 if ((e = e.next) == null) {
1414 if ((val = mf.apply(k)) != null) {
1415 added = true;
1416 last.next = new Node<V>(h, k, val, null);
1417 if (len >= TREE_THRESHOLD)
1418 replaceWithTreeBin(tab, i, k);
1419 }
1420 break;
1421 }
1422 }
1423 }
1424 }
1425 if (len != 0) {
1426 if (!added)
1427 return val;
1428 break;
1429 }
1430 }
1431 }
1432 if (val != null)
1433 addCount(1L, len);
1434 return val;
1435 }
1436
1437 /** Implementation for compute */
1438 @SuppressWarnings("unchecked") private final V internalCompute
1439 (K k, boolean onlyIfPresent,
1440 BiFunction<? super K, ? super V, ? extends V> mf) {
1441 if (k == null || mf == null)
1442 throw new NullPointerException();
1443 int h = spread(k.hashCode());
1444 V val = null;
1445 int delta = 0;
1446 int len = 0;
1447 for (Node<V>[] tab = table;;) {
1448 Node<V> f; int i, fh; Object fk;
1449 if (tab == null)
1450 tab = initTable();
1451 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1452 if (onlyIfPresent)
1453 break;
1454 Node<V> node = new Node<V>(h, k, null, null);
1455 synchronized (node) {
1456 if (casTabAt(tab, i, null, node)) {
1457 try {
1458 len = 1;
1459 if ((val = mf.apply(k, null)) != null) {
1460 node.val = val;
1461 delta = 1;
1462 }
1463 } finally {
1464 if (delta == 0)
1465 setTabAt(tab, i, null);
1466 }
1467 }
1468 }
1469 if (len != 0)
1470 break;
1471 }
1472 else if ((fh = f.hash) < 0) {
1473 if ((fk = f.key) instanceof TreeBin) {
1474 TreeBin<V> t = (TreeBin<V>)fk;
1475 t.acquire(0);
1476 try {
1477 if (tabAt(tab, i) == f) {
1478 len = 1;
1479 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1480 if (p == null && onlyIfPresent)
1481 break;
1482 V pv = (p == null) ? null : p.val;
1483 if ((val = mf.apply(k, pv)) != null) {
1484 if (p != null)
1485 p.val = val;
1486 else {
1487 len = 2;
1488 delta = 1;
1489 t.putTreeNode(h, k, val);
1490 }
1491 }
1492 else if (p != null) {
1493 delta = -1;
1494 t.deleteTreeNode(p);
1495 }
1496 }
1497 } finally {
1498 t.release(0);
1499 }
1500 if (len != 0)
1501 break;
1502 }
1503 else
1504 tab = (Node<V>[])fk;
1505 }
1506 else {
1507 synchronized (f) {
1508 if (tabAt(tab, i) == f) {
1509 len = 1;
1510 for (Node<V> e = f, pred = null;; ++len) {
1511 Object ek; V ev;
1512 if (e.hash == h &&
1513 (ev = e.val) != null &&
1514 ((ek = e.key) == k || k.equals(ek))) {
1515 val = mf.apply(k, ev);
1516 if (val != null)
1517 e.val = val;
1518 else {
1519 delta = -1;
1520 Node<V> en = e.next;
1521 if (pred != null)
1522 pred.next = en;
1523 else
1524 setTabAt(tab, i, en);
1525 }
1526 break;
1527 }
1528 pred = e;
1529 if ((e = e.next) == null) {
1530 if (!onlyIfPresent &&
1531 (val = mf.apply(k, null)) != null) {
1532 pred.next = new Node<V>(h, k, val, null);
1533 delta = 1;
1534 if (len >= TREE_THRESHOLD)
1535 replaceWithTreeBin(tab, i, k);
1536 }
1537 break;
1538 }
1539 }
1540 }
1541 }
1542 if (len != 0)
1543 break;
1544 }
1545 }
1546 if (delta != 0)
1547 addCount((long)delta, len);
1548 return val;
1549 }
1550
1551 /** Implementation for merge */
1552 @SuppressWarnings("unchecked") private final V internalMerge
1553 (K k, V v, BiFunction<? super V, ? super V, ? extends V> mf) {
1554 if (k == null || v == null || mf == null)
1555 throw new NullPointerException();
1556 int h = spread(k.hashCode());
1557 V val = null;
1558 int delta = 0;
1559 int len = 0;
1560 for (Node<V>[] tab = table;;) {
1561 int i; Node<V> f; Object fk; V fv;
1562 if (tab == null)
1563 tab = initTable();
1564 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null) {
1565 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1566 delta = 1;
1567 val = v;
1568 break;
1569 }
1570 }
1571 else if (f.hash < 0) {
1572 if ((fk = f.key) instanceof TreeBin) {
1573 TreeBin<V> t = (TreeBin<V>)fk;
1574 t.acquire(0);
1575 try {
1576 if (tabAt(tab, i) == f) {
1577 len = 1;
1578 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1579 val = (p == null) ? v : mf.apply(p.val, v);
1580 if (val != null) {
1581 if (p != null)
1582 p.val = val;
1583 else {
1584 len = 2;
1585 delta = 1;
1586 t.putTreeNode(h, k, val);
1587 }
1588 }
1589 else if (p != null) {
1590 delta = -1;
1591 t.deleteTreeNode(p);
1592 }
1593 }
1594 } finally {
1595 t.release(0);
1596 }
1597 if (len != 0)
1598 break;
1599 }
1600 else
1601 tab = (Node<V>[])fk;
1602 }
1603 else {
1604 synchronized (f) {
1605 if (tabAt(tab, i) == f) {
1606 len = 1;
1607 for (Node<V> e = f, pred = null;; ++len) {
1608 Object ek; V ev;
1609 if (e.hash == h &&
1610 (ev = e.val) != null &&
1611 ((ek = e.key) == k || k.equals(ek))) {
1612 val = mf.apply(ev, v);
1613 if (val != null)
1614 e.val = val;
1615 else {
1616 delta = -1;
1617 Node<V> en = e.next;
1618 if (pred != null)
1619 pred.next = en;
1620 else
1621 setTabAt(tab, i, en);
1622 }
1623 break;
1624 }
1625 pred = e;
1626 if ((e = e.next) == null) {
1627 val = v;
1628 pred.next = new Node<V>(h, k, val, null);
1629 delta = 1;
1630 if (len >= TREE_THRESHOLD)
1631 replaceWithTreeBin(tab, i, k);
1632 break;
1633 }
1634 }
1635 }
1636 }
1637 if (len != 0)
1638 break;
1639 }
1640 }
1641 if (delta != 0)
1642 addCount((long)delta, len);
1643 return val;
1644 }
1645
1646 /** Implementation for putAll */
1647 @SuppressWarnings("unchecked") private final void internalPutAll
1648 (Map<? extends K, ? extends V> m) {
1649 tryPresize(m.size());
1650 long delta = 0L; // number of uncommitted additions
1651 boolean npe = false; // to throw exception on exit for nulls
1652 try { // to clean up counts on other exceptions
1653 for (Map.Entry<?, ? extends V> entry : m.entrySet()) {
1654 Object k; V v;
1655 if (entry == null || (k = entry.getKey()) == null ||
1656 (v = entry.getValue()) == null) {
1657 npe = true;
1658 break;
1659 }
1660 int h = spread(k.hashCode());
1661 for (Node<V>[] tab = table;;) {
1662 int i; Node<V> f; int fh; Object fk;
1663 if (tab == null)
1664 tab = initTable();
1665 else if ((f = tabAt(tab, i = (tab.length - 1) & h)) == null){
1666 if (casTabAt(tab, i, null, new Node<V>(h, k, v, null))) {
1667 ++delta;
1668 break;
1669 }
1670 }
1671 else if ((fh = f.hash) < 0) {
1672 if ((fk = f.key) instanceof TreeBin) {
1673 TreeBin<V> t = (TreeBin<V>)fk;
1674 boolean validated = false;
1675 t.acquire(0);
1676 try {
1677 if (tabAt(tab, i) == f) {
1678 validated = true;
1679 TreeNode<V> p = t.getTreeNode(h, k, t.root);
1680 if (p != null)
1681 p.val = v;
1682 else {
1683 t.putTreeNode(h, k, v);
1684 ++delta;
1685 }
1686 }
1687 } finally {
1688 t.release(0);
1689 }
1690 if (validated)
1691 break;
1692 }
1693 else
1694 tab = (Node<V>[])fk;
1695 }
1696 else {
1697 int len = 0;
1698 synchronized (f) {
1699 if (tabAt(tab, i) == f) {
1700 len = 1;
1701 for (Node<V> e = f;; ++len) {
1702 Object ek; V ev;
1703 if (e.hash == h &&
1704 (ev = e.val) != null &&
1705 ((ek = e.key) == k || k.equals(ek))) {
1706 e.val = v;
1707 break;
1708 }
1709 Node<V> last = e;
1710 if ((e = e.next) == null) {
1711 ++delta;
1712 last.next = new Node<V>(h, k, v, null);
1713 if (len >= TREE_THRESHOLD)
1714 replaceWithTreeBin(tab, i, k);
1715 break;
1716 }
1717 }
1718 }
1719 }
1720 if (len != 0) {
1721 if (len > 1)
1722 addCount(delta, len);
1723 break;
1724 }
1725 }
1726 }
1727 }
1728 } finally {
1729 if (delta != 0L)
1730 addCount(delta, 2);
1731 }
1732 if (npe)
1733 throw new NullPointerException();
1734 }
1735
1736 /**
1737 * Implementation for clear. Steps through each bin, removing all
1738 * nodes.
1739 */
1740 @SuppressWarnings("unchecked") private final void internalClear() {
1741 long delta = 0L; // negative number of deletions
1742 int i = 0;
1743 Node<V>[] tab = table;
1744 while (tab != null && i < tab.length) {
1745 Node<V> f = tabAt(tab, i);
1746 if (f == null)
1747 ++i;
1748 else if (f.hash < 0) {
1749 Object fk;
1750 if ((fk = f.key) instanceof TreeBin) {
1751 TreeBin<V> t = (TreeBin<V>)fk;
1752 t.acquire(0);
1753 try {
1754 if (tabAt(tab, i) == f) {
1755 for (Node<V> p = t.first; p != null; p = p.next) {
1756 if (p.val != null) { // (currently always true)
1757 p.val = null;
1758 --delta;
1759 }
1760 }
1761 t.first = null;
1762 t.root = null;
1763 ++i;
1764 }
1765 } finally {
1766 t.release(0);
1767 }
1768 }
1769 else
1770 tab = (Node<V>[])fk;
1771 }
1772 else {
1773 synchronized (f) {
1774 if (tabAt(tab, i) == f) {
1775 for (Node<V> e = f; e != null; e = e.next) {
1776 if (e.val != null) { // (currently always true)
1777 e.val = null;
1778 --delta;
1779 }
1780 }
1781 setTabAt(tab, i, null);
1782 ++i;
1783 }
1784 }
1785 }
1786 }
1787 if (delta != 0L)
1788 addCount(delta, -1);
1789 }
1790
1791 /* ---------------- Table Initialization and Resizing -------------- */
1792
1793 /**
1794 * Returns a power of two table size for the given desired capacity.
1795      * See Hacker's Delight, sec 3.2
1796 */
1797 private static final int tableSizeFor(int c) {
1798 int n = c - 1;
1799 n |= n >>> 1;
1800 n |= n >>> 2;
1801 n |= n >>> 4;
1802 n |= n >>> 8;
1803 n |= n >>> 16;
1804 return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
1805 }
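    /*
     * Worked example of the bit-smearing above (illustrative only): for
     * c = 17, n starts at 16 (binary 10000); the successive shifts OR the
     * high bit into every lower position, giving 11111 (31), and n + 1 = 32,
     * the smallest power of two >= 17. A c that is already a power of two is
     * returned unchanged (tableSizeFor(16) == 16), and c <= 1 yields 1.
     */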
1806
1807 /**
1808 * Initializes table, using the size recorded in sizeCtl.
1809 */
1810 @SuppressWarnings("unchecked") private final Node<V>[] initTable() {
1811 Node<V>[] tab; int sc;
1812 while ((tab = table) == null) {
1813 if ((sc = sizeCtl) < 0)
1814 Thread.yield(); // lost initialization race; just spin
1815 else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1816 try {
1817 if ((tab = table) == null) {
1818 int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
1819 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1820 table = tab = (Node<V>[])tb;
1821 sc = n - (n >>> 2);
1822 }
1823 } finally {
1824 sizeCtl = sc;
1825 }
1826 break;
1827 }
1828 }
1829 return tab;
1830 }
1831
1832 /**
1833 * Adds to count, and if table is too small and not already
1834 * resizing, initiates transfer. If already resizing, helps
1835 * perform transfer if work is available. Rechecks occupancy
1836 * after a transfer to see if another resize is already needed
1837 * because resizings are lagging additions.
1838 *
1839 * @param x the count to add
1840 * @param check if <0, don't check resize, if <= 1 only check if uncontended
1841 */
1842 private final void addCount(long x, int check) {
1843 Cell[] as; long b, s;
1844 if ((as = counterCells) != null ||
1845 !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
1846 CellHashCode hc; Cell a; long v; int m;
1847 boolean uncontended = true;
1848 if ((hc = threadCellHashCode.get()) == null ||
1849 as == null || (m = as.length - 1) < 0 ||
1850 (a = as[m & hc.code]) == null ||
1851 !(uncontended =
1852 U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
1853 fullAddCount(x, hc, uncontended);
1854 return;
1855 }
1856 if (check <= 1)
1857 return;
1858 s = sumCount();
1859 }
1860 if (check >= 0) {
1861 Node<V>[] tab, nt; int sc;
1862 while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
1863 tab.length < MAXIMUM_CAPACITY) {
1864 if (sc < 0) {
1865 if (sc == -1 || transferIndex <= transferOrigin ||
1866 (nt = nextTable) == null)
1867 break;
1868 if (U.compareAndSwapInt(this, SIZECTL, sc, sc - 1))
1869 transfer(tab, nt);
1870 }
1871 else if (U.compareAndSwapInt(this, SIZECTL, sc, -2))
1872 transfer(tab, null);
1873 s = sumCount();
1874 }
1875 }
1876 }
1877
1878 /**
1879 * Tries to presize table to accommodate the given number of elements.
1880 *
1881 * @param size number of elements (doesn't need to be perfectly accurate)
1882 */
1883 @SuppressWarnings("unchecked") private final void tryPresize(int size) {
1884 int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY :
1885 tableSizeFor(size + (size >>> 1) + 1);
1886 int sc;
1887 while ((sc = sizeCtl) >= 0) {
1888 Node<V>[] tab = table; int n;
1889 if (tab == null || (n = tab.length) == 0) {
1890 n = (sc > c) ? sc : c;
1891 if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
1892 try {
1893 if (table == tab) {
1894 @SuppressWarnings("rawtypes") Node[] tb = new Node[n];
1895 table = (Node<V>[])tb;
1896 sc = n - (n >>> 2);
1897 }
1898 } finally {
1899 sizeCtl = sc;
1900 }
1901 }
1902 }
1903 else if (c <= sc || n >= MAXIMUM_CAPACITY)
1904 break;
1905 else if (tab == table &&
1906 U.compareAndSwapInt(this, SIZECTL, sc, -2))
1907 transfer(tab, null);
1908 }
1909 }
1910
1911 /*
1912 * Moves and/or copies the nodes in each bin to new table. See
1913 * above for explanation.
1914 */
1915 @SuppressWarnings("unchecked") private final void transfer
1916 (Node<V>[] tab, Node<V>[] nextTab) {
1917 int n = tab.length, stride;
1918 if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
1919 stride = MIN_TRANSFER_STRIDE; // subdivide range
1920 if (nextTab == null) { // initiating
1921 try {
1922 @SuppressWarnings("rawtypes") Node[] tb = new Node[n << 1];
1923 nextTab = (Node<V>[])tb;
1924 } catch (Throwable ex) { // try to cope with OOME
1925 sizeCtl = Integer.MAX_VALUE;
1926 return;
1927 }
1928 nextTable = nextTab;
1929 transferOrigin = n;
1930 transferIndex = n;
1931 Node<V> rev = new Node<V>(MOVED, tab, null, null);
1932 for (int k = n; k > 0;) { // progressively reveal ready slots
1933 int nextk = (k > stride) ? k - stride : 0;
1934 for (int m = nextk; m < k; ++m)
1935 nextTab[m] = rev;
1936 for (int m = n + nextk; m < n + k; ++m)
1937 nextTab[m] = rev;
1938 U.putOrderedInt(this, TRANSFERORIGIN, k = nextk);
1939 }
1940 }
1941 int nextn = nextTab.length;
1942 Node<V> fwd = new Node<V>(MOVED, nextTab, null, null);
1943 boolean advance = true;
1944 for (int i = 0, bound = 0;;) {
1945 int nextIndex, nextBound; Node<V> f; Object fk;
1946 while (advance) {
1947 if (--i >= bound)
1948 advance = false;
1949 else if ((nextIndex = transferIndex) <= transferOrigin) {
1950 i = -1;
1951 advance = false;
1952 }
1953 else if (U.compareAndSwapInt
1954 (this, TRANSFERINDEX, nextIndex,
1955 nextBound = (nextIndex > stride ?
1956 nextIndex - stride : 0))) {
1957 bound = nextBound;
1958 i = nextIndex - 1;
1959 advance = false;
1960 }
1961 }
1962 if (i < 0 || i >= n || i + n >= nextn) {
1963 for (int sc;;) {
1964 if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, ++sc)) {
1965 if (sc == -1) {
1966 nextTable = null;
1967 table = nextTab;
1968 sizeCtl = (n << 1) - (n >>> 1);
1969 }
1970 return;
1971 }
1972 }
1973 }
1974 else if ((f = tabAt(tab, i)) == null) {
1975 if (casTabAt(tab, i, null, fwd)) {
1976 setTabAt(nextTab, i, null);
1977 setTabAt(nextTab, i + n, null);
1978 advance = true;
1979 }
1980 }
1981 else if (f.hash >= 0) {
1982 synchronized (f) {
1983 if (tabAt(tab, i) == f) {
1984 int runBit = f.hash & n;
1985 Node<V> lastRun = f, lo = null, hi = null;
1986 for (Node<V> p = f.next; p != null; p = p.next) {
1987 int b = p.hash & n;
1988 if (b != runBit) {
1989 runBit = b;
1990 lastRun = p;
1991 }
1992 }
1993 if (runBit == 0)
1994 lo = lastRun;
1995 else
1996 hi = lastRun;
1997 for (Node<V> p = f; p != lastRun; p = p.next) {
1998 int ph = p.hash;
1999 Object pk = p.key; V pv = p.val;
2000 if ((ph & n) == 0)
2001 lo = new Node<V>(ph, pk, pv, lo);
2002 else
2003 hi = new Node<V>(ph, pk, pv, hi);
2004 }
2005 setTabAt(nextTab, i, lo);
2006 setTabAt(nextTab, i + n, hi);
2007 setTabAt(tab, i, fwd);
2008 advance = true;
2009 }
2010 }
2011 }
2012 else if ((fk = f.key) instanceof TreeBin) {
2013 TreeBin<V> t = (TreeBin<V>)fk;
2014 t.acquire(0);
2015 try {
2016 if (tabAt(tab, i) == f) {
2017 TreeBin<V> lt = new TreeBin<V>();
2018 TreeBin<V> ht = new TreeBin<V>();
2019 int lc = 0, hc = 0;
2020 for (Node<V> e = t.first; e != null; e = e.next) {
2021 int h = e.hash;
2022 Object k = e.key; V v = e.val;
2023 if ((h & n) == 0) {
2024 ++lc;
2025 lt.putTreeNode(h, k, v);
2026 }
2027 else {
2028 ++hc;
2029 ht.putTreeNode(h, k, v);
2030 }
2031 }
2032 Node<V> ln, hn; // throw away trees if too small
2033 if (lc < TREE_THRESHOLD) {
2034 ln = null;
2035 for (Node<V> p = lt.first; p != null; p = p.next)
2036 ln = new Node<V>(p.hash, p.key, p.val, ln);
2037 }
2038 else
2039 ln = new Node<V>(MOVED, lt, null, null);
2040 setTabAt(nextTab, i, ln);
2041 if (hc < TREE_THRESHOLD) {
2042 hn = null;
2043 for (Node<V> p = ht.first; p != null; p = p.next)
2044 hn = new Node<V>(p.hash, p.key, p.val, hn);
2045 }
2046 else
2047 hn = new Node<V>(MOVED, ht, null, null);
2048 setTabAt(nextTab, i + n, hn);
2049 setTabAt(tab, i, fwd);
2050 advance = true;
2051 }
2052 } finally {
2053 t.release(0);
2054 }
2055 }
2056 else
2057 advance = true; // already processed
2058 }
2059 }
2060
2061 /* ---------------- Counter support -------------- */
2062
2063 final long sumCount() {
2064 Cell[] as = counterCells; Cell a;
2065 long sum = baseCount;
2066 if (as != null) {
2067 for (int i = 0; i < as.length; ++i) {
2068 if ((a = as[i]) != null)
2069 sum += a.value;
2070 }
2071 }
2072 return sum;
2073 }
2074
2075 // See LongAdder version for explanation
2076 private final void fullAddCount(long x, CellHashCode hc,
2077 boolean wasUncontended) {
2078 int h;
2079 if (hc == null) {
2080 hc = new CellHashCode();
2081 int s = cellHashCodeGenerator.addAndGet(SEED_INCREMENT);
2082 h = hc.code = (s == 0) ? 1 : s; // Avoid zero
2083 threadCellHashCode.set(hc);
2084 }
2085 else
2086 h = hc.code;
2087 boolean collide = false; // True if last slot nonempty
2088 for (;;) {
2089 Cell[] as; Cell a; int n; long v;
2090 if ((as = counterCells) != null && (n = as.length) > 0) {
2091 if ((a = as[(n - 1) & h]) == null) {
2092 if (cellsBusy == 0) { // Try to attach new Cell
2093 Cell r = new Cell(x); // Optimistic create
2094 if (cellsBusy == 0 &&
2095 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2096 boolean created = false;
2097 try { // Recheck under lock
2098 Cell[] rs; int m, j;
2099 if ((rs = counterCells) != null &&
2100 (m = rs.length) > 0 &&
2101 rs[j = (m - 1) & h] == null) {
2102 rs[j] = r;
2103 created = true;
2104 }
2105 } finally {
2106 cellsBusy = 0;
2107 }
2108 if (created)
2109 break;
2110 continue; // Slot is now non-empty
2111 }
2112 }
2113 collide = false;
2114 }
2115 else if (!wasUncontended) // CAS already known to fail
2116 wasUncontended = true; // Continue after rehash
2117 else if (U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))
2118 break;
2119 else if (counterCells != as || n >= NCPU)
2120 collide = false; // At max size or stale
2121 else if (!collide)
2122 collide = true;
2123 else if (cellsBusy == 0 &&
2124 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2125 try {
2126 if (counterCells == as) {// Expand table unless stale
2127 Cell[] rs = new Cell[n << 1];
2128 for (int i = 0; i < n; ++i)
2129 rs[i] = as[i];
2130 counterCells = rs;
2131 }
2132 } finally {
2133 cellsBusy = 0;
2134 }
2135 collide = false;
2136 continue; // Retry with expanded table
2137 }
2138 h ^= h << 13; // Rehash
2139 h ^= h >>> 17;
2140 h ^= h << 5;
2141 }
2142 else if (cellsBusy == 0 && counterCells == as &&
2143 U.compareAndSwapInt(this, CELLSBUSY, 0, 1)) {
2144 boolean init = false;
2145 try { // Initialize table
2146 if (counterCells == as) {
2147 Cell[] rs = new Cell[2];
2148 rs[h & 1] = new Cell(x);
2149 counterCells = rs;
2150 init = true;
2151 }
2152 } finally {
2153 cellsBusy = 0;
2154 }
2155 if (init)
2156 break;
2157 }
2158 else if (U.compareAndSwapLong(this, BASECOUNT, v = baseCount, v + x))
2159 break; // Fall back on using base
2160 }
2161 hc.code = h; // Record index for next time
2162 }
2163
2164     /* ---------------- Table Traversal -------------- */
2165
2166 /**
2167 * Encapsulates traversal for methods such as containsValue; also
2168 * serves as a base class for other iterators and bulk tasks.
2169 *
2170 * At each step, the iterator snapshots the key ("nextKey") and
2171 * value ("nextVal") of a valid node (i.e., one that, at point of
2172 * snapshot, has a non-null user value). Because val fields can
2173 * change (including to null, indicating deletion), field nextVal
2174 * might not be accurate at point of use, but still maintains the
2175 * weak consistency property of holding a value that was once
2176 * valid. To support iterator.remove, the nextKey field is not
2177 * updated (nulled out) when the iterator cannot advance.
2178 *
2179 * Internal traversals directly access these fields, as in:
2180 * {@code while (it.advance() != null) { process(it.nextKey); }}
2181 *
2182 * Exported iterators must track whether the iterator has advanced
2183 * (in hasNext vs next) (by setting/checking/nulling field
2184 * nextVal), and then extract key, value, or key-value pairs as
2185 * return values of next().
2186 *
2187 * The iterator visits once each still-valid node that was
2188 * reachable upon iterator construction. It might miss some that
2189 * were added to a bin after the bin was visited, which is OK wrt
2190 * consistency guarantees. Maintaining this property in the face
2191 * of possible ongoing resizes requires a fair amount of
2192 * bookkeeping state that is difficult to optimize away amidst
2193 * volatile accesses. Even so, traversal maintains reasonable
2194 * throughput.
2195 *
2196 * Normally, iteration proceeds bin-by-bin traversing lists.
2197 * However, if the table has been resized, then all future steps
2198 * must traverse both the bin at the current index as well as at
2199 * (index + baseSize); and so on for further resizings. To
2200 * paranoically cope with potential sharing by users of iterators
2201      * across threads, iteration terminates if a bounds check fails
2202 * for a table read.
2203 *
2204 * This class supports both Spliterator-based traversal and
2205 * CountedCompleter-based bulk tasks. The same "batch" field is
2206 * used, but in slightly different ways, in the two cases. For
2207 * Spliterators, it is a saturating (at Integer.MAX_VALUE)
2208 * estimate of element coverage. For CHM tasks, it is a pre-scaled
2209 * size that halves down to zero for leaf tasks, that is only
2210 * computed upon execution of the task. (Tasks can be submitted to
2211 * any pool, of any size, so we don't know scale factors until
2212 * running.)
2213 *
2214 * This class extends CountedCompleter to streamline parallel
2215 * iteration in bulk operations. This adds only a few fields of
2216      * space overhead, small enough not to matter in the cases where
2217      * it is not needed. Because CountedCompleter is
2218 * Serializable, but iterators need not be, we need to add warning
2219 * suppressions.
2220 */
2221 @SuppressWarnings("serial") static class Traverser<K,V,R>
2222 extends CountedCompleter<R> {
2223 final ConcurrentHashMap<K, V> map;
2224 Node<V> next; // the next entry to use
2225 Object nextKey; // cached key field of next
2226 V nextVal; // cached val field of next
2227 Node<V>[] tab; // current table; updated if resized
2228 int index; // index of bin to use next
2229 int baseIndex; // current index of initial table
2230 int baseLimit; // index bound for initial table
2231 int baseSize; // initial table size
2232 int batch; // split control
2233 /** Creates iterator for all entries in the table. */
2234 Traverser(ConcurrentHashMap<K, V> map) {
2235 this.map = map;
2236 Node<V>[] t;
2237 if ((t = tab = map.table) != null)
2238 baseLimit = baseSize = t.length;
2239 }
2240
2241 /** Task constructor */
2242 Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it, int batch) {
2243 super(it);
2244 this.map = map;
2245 this.batch = batch; // -1 if unknown
2246 if (it == null) {
2247 Node<V>[] t;
2248 if ((t = tab = map.table) != null)
2249 baseLimit = baseSize = t.length;
2250 }
2251 else { // split parent
2252 this.tab = it.tab;
2253 this.baseSize = it.baseSize;
2254 int hi = this.baseLimit = it.baseLimit;
2255 it.baseLimit = this.index = this.baseIndex =
2256 (hi + it.baseIndex + 1) >>> 1;
2257 }
2258 }
2259
2260 /** Spliterator constructor */
2261 Traverser(ConcurrentHashMap<K,V> map, Traverser<K,V,?> it) {
2262 super(it);
2263 this.map = map;
2264 if (it == null) {
2265 Node<V>[] t;
2266 if ((t = tab = map.table) != null)
2267 baseLimit = baseSize = t.length;
2268 long n = map.sumCount();
2269 batch = ((n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2270 (int)n);
2271 }
2272 else {
2273 this.tab = it.tab;
2274 this.baseSize = it.baseSize;
2275 int hi = this.baseLimit = it.baseLimit;
2276 it.baseLimit = this.index = this.baseIndex =
2277 (hi + it.baseIndex + 1) >>> 1;
2278 this.batch = it.batch >>>= 1;
2279 }
2280 }
2281
2282 /**
2283 * Advances next; returns nextVal or null if terminated.
2284 * See above for explanation.
2285 */
2286 @SuppressWarnings("unchecked") final V advance() {
2287 Node<V> e = next;
2288 V ev = null;
2289 outer: do {
2290 if (e != null) // advance past used/skipped node
2291 e = e.next;
2292 while (e == null) { // get to next non-null bin
2293 ConcurrentHashMap<K, V> m;
2294 Node<V>[] t; int b, i, n; Object ek; // must use locals
2295 if ((t = tab) != null)
2296 n = t.length;
2297 else if ((m = map) != null && (t = tab = m.table) != null)
2298 n = baseLimit = baseSize = t.length;
2299 else
2300 break outer;
2301 if ((b = baseIndex) >= baseLimit ||
2302 (i = index) < 0 || i >= n)
2303 break outer;
2304 if ((e = tabAt(t, i)) != null && e.hash < 0) {
2305 if ((ek = e.key) instanceof TreeBin)
2306 e = ((TreeBin<V>)ek).first;
2307 else {
2308 tab = (Node<V>[])ek;
2309 continue; // restarts due to null val
2310 }
2311 } // visit upper slots if present
2312 index = (i += baseSize) < n ? i : (baseIndex = b + 1);
2313 }
2314 nextKey = e.key;
2315 } while ((ev = e.val) == null); // skip deleted or special nodes
2316 next = e;
2317 return nextVal = ev;
2318 }
2319
2320 public final void remove() {
2321 Object k = nextKey;
2322 if (k == null && (advance() == null || (k = nextKey) == null))
2323 throw new IllegalStateException();
2324 map.internalReplace(k, null, null);
2325 }
2326
2327 public final boolean hasNext() {
2328 return nextVal != null || advance() != null;
2329 }
2330
2331 public final boolean hasMoreElements() { return hasNext(); }
2332
2333 public void compute() { } // default no-op CountedCompleter body
2334
2335 /**
2336 * Returns a batch value > 0 if this task should (and must) be
2337          * split; if so, adds to the pending count, and in any case
2338          * updates the batch value. The initial batch value is approx
2339          * exp2 of the number of times (minus one) to split the task in
2340          * two before executing the leaf action. This value is faster to
2341 * compute and more convenient to use as a guide to splitting
2342 * than is the depth, since it is used while dividing by two
2343 * anyway.
2344 */
2345 final int preSplit() {
2346 int b; ForkJoinPool pool;
2347 if ((b = batch) < 0) { // force initialization
2348 int sp = (((pool = getPool()) == null) ?
2349 ForkJoinPool.getCommonPoolParallelism() :
2350 pool.getParallelism()) << 3; // slack of 8
2351 long n = map.sumCount();
2352 b = (n <= 0L) ? 0 : (n < (long)sp) ? (int)n : sp;
2353 }
2354 b = (b <= 1 || baseIndex == baseLimit) ? 0 : (b >>> 1);
2355 if ((batch = b) > 0)
2356 addToPendingCount(1);
2357 return b;
2358 }
2359
2360 // spliterator support
2361
2362 public long exactSizeIfKnown() {
2363 return -1;
2364 }
2365
2366 public boolean hasExactSplits() {
2367 return false;
2368 }
2369
2370 public int getNaturalSplits() {
2371 return baseLimit > baseIndex ? 1 : 0;
2372 }
2373
2374 public long estimateSize() {
2375 return batch;
2376 }
2377 }
2378
2379 /* ---------------- Public operations -------------- */
2380
2381 /**
2382 * Creates a new, empty map with the default initial table size (16).
2383 */
2384 public ConcurrentHashMap() {
2385 }
2386
2387 /**
2388 * Creates a new, empty map with an initial table size
2389 * accommodating the specified number of elements without the need
2390 * to dynamically resize.
2391 *
2392 * @param initialCapacity The implementation performs internal
2393 * sizing to accommodate this many elements.
2394 * @throws IllegalArgumentException if the initial capacity of
2395 * elements is negative
2396 */
2397 public ConcurrentHashMap(int initialCapacity) {
2398 if (initialCapacity < 0)
2399 throw new IllegalArgumentException();
2400 int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
2401 MAXIMUM_CAPACITY :
2402 tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
2403 this.sizeCtl = cap;
2404 }
2405
2406 /**
2407 * Creates a new map with the same mappings as the given map.
2408 *
2409 * @param m the map
2410 */
2411 public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
2412 this.sizeCtl = DEFAULT_CAPACITY;
2413 internalPutAll(m);
2414 }
2415
2416 /**
2417 * Creates a new, empty map with an initial table size based on
2418 * the given number of elements ({@code initialCapacity}) and
2419 * initial table density ({@code loadFactor}).
2420 *
2421 * @param initialCapacity the initial capacity. The implementation
2422 * performs internal sizing to accommodate this many elements,
2423 * given the specified load factor.
2424 * @param loadFactor the load factor (table density) for
2425 * establishing the initial table size
2426 * @throws IllegalArgumentException if the initial capacity of
2427 * elements is negative or the load factor is nonpositive
2428 *
2429 * @since 1.6
2430 */
2431 public ConcurrentHashMap(int initialCapacity, float loadFactor) {
2432 this(initialCapacity, loadFactor, 1);
2433 }
2434
2435 /**
2436 * Creates a new, empty map with an initial table size based on
2437 * the given number of elements ({@code initialCapacity}), table
2438 * density ({@code loadFactor}), and number of concurrently
2439 * updating threads ({@code concurrencyLevel}).
2440 *
2441 * @param initialCapacity the initial capacity. The implementation
2442 * performs internal sizing to accommodate this many elements,
2443 * given the specified load factor.
2444 * @param loadFactor the load factor (table density) for
2445 * establishing the initial table size
2446 * @param concurrencyLevel the estimated number of concurrently
2447 * updating threads. The implementation may use this value as
2448 * a sizing hint.
2449 * @throws IllegalArgumentException if the initial capacity is
2450 * negative or the load factor or concurrencyLevel are
2451 * nonpositive
2452 */
2453 public ConcurrentHashMap(int initialCapacity,
2454 float loadFactor, int concurrencyLevel) {
2455 if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
2456 throw new IllegalArgumentException();
2457 if (initialCapacity < concurrencyLevel) // Use at least as many bins
2458 initialCapacity = concurrencyLevel; // as estimated threads
2459 long size = (long)(1.0 + (long)initialCapacity / loadFactor);
2460 int cap = (size >= (long)MAXIMUM_CAPACITY) ?
2461 MAXIMUM_CAPACITY : tableSizeFor((int)size);
2462 this.sizeCtl = cap;
2463 }
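    /*
     * Sizing sketch (illustrative arithmetic only): with initialCapacity = 16,
     * loadFactor = 0.75f and concurrencyLevel = 4, size is
     * (long)(1.0 + 16/0.75) == 22, so the initial table capacity recorded in
     * sizeCtl is tableSizeFor(22) == 32.
     */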
2464
2465 /**
2466 * Creates a new {@link Set} backed by a ConcurrentHashMap
2467 * from the given type to {@code Boolean.TRUE}.
2468 *
2469 * @return the new set
2470 */
2471 public static <K> KeySetView<K,Boolean> newKeySet() {
2472 return new KeySetView<K,Boolean>(new ConcurrentHashMap<K,Boolean>(),
2473 Boolean.TRUE);
2474 }
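    /*
     * Minimal usage sketch (names are hypothetical): the returned view acts
     * as a thread-safe Set whose additions map each element to Boolean.TRUE
     * in the backing map.
     *
     *   KeySetView<String,Boolean> live = ConcurrentHashMap.newKeySet();
     *   live.add("node-1");                 // maps "node-1" to Boolean.TRUE
     *   boolean seen = live.contains("node-1");
     */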
2475
2476 /**
2477 * Creates a new {@link Set} backed by a ConcurrentHashMap
2478 * from the given type to {@code Boolean.TRUE}.
2479 *
2480 * @param initialCapacity The implementation performs internal
2481 * sizing to accommodate this many elements.
2482 * @throws IllegalArgumentException if the initial capacity of
2483 * elements is negative
2484 * @return the new set
2485 */
2486 public static <K> KeySetView<K,Boolean> newKeySet(int initialCapacity) {
2487 return new KeySetView<K,Boolean>
2488 (new ConcurrentHashMap<K,Boolean>(initialCapacity), Boolean.TRUE);
2489 }
2490
2491 /**
2492 * {@inheritDoc}
2493 */
2494 public boolean isEmpty() {
2495 return sumCount() <= 0L; // ignore transient negative values
2496 }
2497
2498 /**
2499 * {@inheritDoc}
2500 */
2501 public int size() {
2502 long n = sumCount();
2503 return ((n < 0L) ? 0 :
2504 (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE :
2505 (int)n);
2506 }
2507
2508 /**
2509 * Returns the number of mappings. This method should be used
2510 * instead of {@link #size} because a ConcurrentHashMap may
2511 * contain more mappings than can be represented as an int. The
2512 * value returned is an estimate; the actual count may differ if
2513 * there are concurrent insertions or removals.
2514 *
2515 * @return the number of mappings
2516 */
2517 public long mappingCount() {
2518 long n = sumCount();
2519 return (n < 0L) ? 0L : n; // ignore transient negative values
2520 }
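    /*
     * Usage sketch (hypothetical "map" instance): prefer mappingCount() over
     * size() when the map may hold more than Integer.MAX_VALUE mappings;
     * under concurrent updates the result is only an estimate.
     *
     *   long n = map.mappingCount();
     */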
2521
2522 /**
2523 * Returns the value to which the specified key is mapped,
2524 * or {@code null} if this map contains no mapping for the key.
2525 *
2526 * <p>More formally, if this map contains a mapping from a key
2527 * {@code k} to a value {@code v} such that {@code key.equals(k)},
2528 * then this method returns {@code v}; otherwise it returns
2529 * {@code null}. (There can be at most one such mapping.)
2530 *
2531 * @throws NullPointerException if the specified key is null
2532 */
2533 public V get(Object key) {
2534 return internalGet(key);
2535 }
2536
2537 /**
2538 * Returns the value to which the specified key is mapped,
2539 * or the given defaultValue if this map contains no mapping for the key.
2540 *
2541 * @param key the key
2542 * @param defaultValue the value to return if this map contains
2543 * no mapping for the given key
2544 * @return the mapping for the key, if present; else the defaultValue
2545 * @throws NullPointerException if the specified key is null
2546 */
2547 public V getValueOrDefault(Object key, V defaultValue) {
2548 V v;
2549 return (v = internalGet(key)) == null ? defaultValue : v;
2550 }
2551
2552 /**
2553 * Tests if the specified object is a key in this table.
2554 *
2555 * @param key possible key
2556 * @return {@code true} if and only if the specified object
2557 * is a key in this table, as determined by the
2558 * {@code equals} method; {@code false} otherwise
2559 * @throws NullPointerException if the specified key is null
2560 */
2561 public boolean containsKey(Object key) {
2562 return internalGet(key) != null;
2563 }
2564
2565 /**
2566 * Returns {@code true} if this map maps one or more keys to the
2567 * specified value. Note: This method may require a full traversal
2568 * of the map, and is much slower than method {@code containsKey}.
2569 *
2570 * @param value value whose presence in this map is to be tested
2571 * @return {@code true} if this map maps one or more keys to the
2572 * specified value
2573 * @throws NullPointerException if the specified value is null
2574 */
2575 public boolean containsValue(Object value) {
2576 if (value == null)
2577 throw new NullPointerException();
2578 V v;
2579 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2580 while ((v = it.advance()) != null) {
2581 if (v == value || value.equals(v))
2582 return true;
2583 }
2584 return false;
2585 }
2586
2587 /**
2588 * Legacy method testing if some key maps into the specified value
2589 * in this table. This method is identical in functionality to
2590 * {@link #containsValue}, and exists solely to ensure
2591 * full compatibility with class {@link java.util.Hashtable},
2592 * which supported this method prior to introduction of the
2593 * Java Collections framework.
2594 *
2595 * @param value a value to search for
2596 * @return {@code true} if and only if some key maps to the
2597 * {@code value} argument in this table as
2598 * determined by the {@code equals} method;
2599 * {@code false} otherwise
2600 * @throws NullPointerException if the specified value is null
2601 */
2602 @Deprecated public boolean contains(Object value) {
2603 return containsValue(value);
2604 }
2605
2606 /**
2607 * Maps the specified key to the specified value in this table.
2608 * Neither the key nor the value can be null.
2609 *
2610 * <p>The value can be retrieved by calling the {@code get} method
2611 * with a key that is equal to the original key.
2612 *
2613 * @param key key with which the specified value is to be associated
2614 * @param value value to be associated with the specified key
2615 * @return the previous value associated with {@code key}, or
2616 * {@code null} if there was no mapping for {@code key}
2617 * @throws NullPointerException if the specified key or value is null
2618 */
2619 public V put(K key, V value) {
2620 return internalPut(key, value, false);
2621 }
2622
2623 /**
2624 * {@inheritDoc}
2625 *
2626 * @return the previous value associated with the specified key,
2627 * or {@code null} if there was no mapping for the key
2628 * @throws NullPointerException if the specified key or value is null
2629 */
2630 public V putIfAbsent(K key, V value) {
2631 return internalPut(key, value, true);
2632 }
2633
2634 /**
2635 * Copies all of the mappings from the specified map to this one.
2636 * These mappings replace any mappings that this map had for any of the
2637 * keys currently in the specified map.
2638 *
2639 * @param m mappings to be stored in this map
2640 */
2641 public void putAll(Map<? extends K, ? extends V> m) {
2642 internalPutAll(m);
2643 }
2644
2645 /**
2646 * If the specified key is not already associated with a value (or
2647 * is mapped to {@code null}), attempts to compute its value using
2648 * the given mapping function and enters it into this map unless
2649 * {@code null}. The entire method invocation is performed
2650 * atomically, so the function is applied at most once per key.
2651 * Some attempted update operations on this map by other threads
2652 * may be blocked while computation is in progress, so the
2653 * computation should be short and simple, and must not attempt to
2654 * update any other mappings of this Map.
2655 *
2656 * @param key key with which the specified value is to be associated
2657 * @param mappingFunction the function to compute a value
2658 * @return the current (existing or computed) value associated with
2659 * the specified key, or null if the computed value is null
2660 * @throws NullPointerException if the specified key or mappingFunction
2661 * is null
2662 * @throws IllegalStateException if the computation detectably
2663 * attempts a recursive update to this map that would
2664 * otherwise never complete
2665 * @throws RuntimeException or Error if the mappingFunction does so,
2666 * in which case the mapping is left unestablished
2667 */
2668 public V computeIfAbsent
2669 (K key, Function<? super K, ? extends V> mappingFunction) {
2670 return internalComputeIfAbsent(key, mappingFunction);
2671 }
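    /*
     * Usage sketch (hypothetical names "n" and "factorialOf"; illustrative
     * lambda syntax): the common create-on-first-use idiom, where the
     * mapping function is applied at most once per key.
     *
     *   ConcurrentHashMap<Integer, BigInteger> cache = new ConcurrentHashMap<>();
     *   BigInteger f = cache.computeIfAbsent(n, key -> factorialOf(key));
     */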
2672
2673 /**
2674 * If the value for the specified key is present and non-null,
2675 * attempts to compute a new mapping given the key and its current
2676 * mapped value. The entire method invocation is performed
2677 * atomically. Some attempted update operations on this map by
2678 * other threads may be blocked while computation is in progress,
2679 * so the computation should be short and simple, and must not
2680 * attempt to update any other mappings of this Map.
2681 *
2682 * @param key key with which the specified value is to be associated
2683 * @param remappingFunction the function to compute a value
2684 * @return the new value associated with the specified key, or null if none
2685 * @throws NullPointerException if the specified key or remappingFunction
2686 * is null
2687 * @throws IllegalStateException if the computation detectably
2688 * attempts a recursive update to this map that would
2689 * otherwise never complete
2690 * @throws RuntimeException or Error if the remappingFunction does so,
2691 * in which case the mapping is unchanged
2692 */
2693 public V computeIfPresent
2694 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2695 return internalCompute(key, true, remappingFunction);
2696 }
2697
2698 /**
2699 * Attempts to compute a mapping for the specified key and its
2700 * current mapped value (or {@code null} if there is no current
2701 * mapping). The entire method invocation is performed atomically.
2702 * Some attempted update operations on this map by other threads
2703 * may be blocked while computation is in progress, so the
2704 * computation should be short and simple, and must not attempt to
2705 * update any other mappings of this Map.
2706 *
2707 * @param key key with which the specified value is to be associated
2708 * @param remappingFunction the function to compute a value
2709 * @return the new value associated with the specified key, or null if none
2710 * @throws NullPointerException if the specified key or remappingFunction
2711 * is null
2712 * @throws IllegalStateException if the computation detectably
2713 * attempts a recursive update to this map that would
2714 * otherwise never complete
2715 * @throws RuntimeException or Error if the remappingFunction does so,
2716 * in which case the mapping is unchanged
2717 */
2718 public V compute
2719 (K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
2720 return internalCompute(key, false, remappingFunction);
2721 }
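    /*
     * Usage sketch (hypothetical names): an atomic per-key counter; the
     * remapping function sees null when no mapping currently exists.
     *
     *   ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
     *   counts.compute(word, (k, v) -> (v == null) ? 1 : v + 1);
     */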
2722
2723 /**
2724 * If the specified key is not already associated with a
2725 * (non-null) value, associates it with the given value.
2726 * Otherwise, replaces the value with the results of the given
2727 * remapping function, or removes if {@code null}. The entire
2728 * method invocation is performed atomically. Some attempted
2729 * update operations on this map by other threads may be blocked
2730 * while computation is in progress, so the computation should be
2731 * short and simple, and must not attempt to update any other
2732 * mappings of this Map.
2733 *
2734 * @param key key with which the specified value is to be associated
2735 * @param value the value to use if absent
2736 * @param remappingFunction the function to recompute a value if present
2737 * @return the new value associated with the specified key, or null if none
2738 * @throws NullPointerException if the specified key or the
2739 * remappingFunction is null
2740 * @throws RuntimeException or Error if the remappingFunction does so,
2741 * in which case the mapping is unchanged
2742 */
2743 public V merge
2744 (K key, V value,
2745 BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
2746 return internalMerge(key, value, remappingFunction);
2747 }
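    /*
     * Usage sketch (hypothetical names, continuing the counter example
     * above): merge expresses the same increment more directly, because the
     * given value is used when the key is absent.
     *
     *   counts.merge(word, 1, (a, b) -> a + b);   // increment, starting at 1
     */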
2748
2749 /**
2750 * Removes the key (and its corresponding value) from this map.
2751 * This method does nothing if the key is not in the map.
2752 *
2753 * @param key the key that needs to be removed
2754 * @return the previous value associated with {@code key}, or
2755 * {@code null} if there was no mapping for {@code key}
2756 * @throws NullPointerException if the specified key is null
2757 */
2758 public V remove(Object key) {
2759 return internalReplace(key, null, null);
2760 }
2761
2762 /**
2763 * {@inheritDoc}
2764 *
2765 * @throws NullPointerException if the specified key is null
2766 */
2767 public boolean remove(Object key, Object value) {
2768 return value != null && internalReplace(key, null, value) != null;
2769 }
2770
2771 /**
2772 * {@inheritDoc}
2773 *
2774 * @throws NullPointerException if any of the arguments are null
2775 */
2776 public boolean replace(K key, V oldValue, V newValue) {
2777 if (key == null || oldValue == null || newValue == null)
2778 throw new NullPointerException();
2779 return internalReplace(key, newValue, oldValue) != null;
2780 }
2781
2782 /**
2783 * {@inheritDoc}
2784 *
2785 * @return the previous value associated with the specified key,
2786 * or {@code null} if there was no mapping for the key
2787 * @throws NullPointerException if the specified key or value is null
2788 */
2789 public V replace(K key, V value) {
2790 if (key == null || value == null)
2791 throw new NullPointerException();
2792 return internalReplace(key, value, null);
2793 }
2794
2795 /**
2796 * Removes all of the mappings from this map.
2797 */
2798 public void clear() {
2799 internalClear();
2800 }
2801
2802 /**
2803 * Returns a {@link Set} view of the keys contained in this map.
2804 * The set is backed by the map, so changes to the map are
2805 * reflected in the set, and vice-versa.
2806 *
2807 * @return the set view
2808 */
2809 public KeySetView<K,V> keySet() {
2810 KeySetView<K,V> ks = keySet;
2811 return (ks != null) ? ks : (keySet = new KeySetView<K,V>(this, null));
2812 }
2813
2814 /**
2815 * Returns a {@link Set} view of the keys in this map, using the
2816 * given common mapped value for any additions (i.e., {@link
2817 * Collection#add} and {@link Collection#addAll}). This is of
2818 * course only appropriate if it is acceptable to use the same
2819 * value for all additions from this view.
2820 *
2821 * @param mappedValue the mapped value to use for any
2822 * additions.
2823 * @return the set view
2824 * @throws NullPointerException if the mappedValue is null
2825 */
2826 public KeySetView<K,V> keySet(V mappedValue) {
2827 if (mappedValue == null)
2828 throw new NullPointerException();
2829 return new KeySetView<K,V>(this, mappedValue);
2830 }
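    /*
     * Usage sketch (hypothetical names): a set view whose additions all use
     * the same marker value in this map.
     *
     *   ConcurrentHashMap<String, Boolean> flags = new ConcurrentHashMap<>();
     *   Set<String> enabled = flags.keySet(Boolean.TRUE);
     *   enabled.add("feature-x");   // associates "feature-x" with TRUE in flags
     */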
2831
2832 /**
2833 * Returns a {@link Collection} view of the values contained in this map.
2834 * The collection is backed by the map, so changes to the map are
2835 * reflected in the collection, and vice-versa.
2836 */
2837 public ValuesView<K,V> values() {
2838 ValuesView<K,V> vs = values;
2839 return (vs != null) ? vs : (values = new ValuesView<K,V>(this));
2840 }
2841
2842 /**
2843 * Returns a {@link Set} view of the mappings contained in this map.
2844 * The set is backed by the map, so changes to the map are
2845 * reflected in the set, and vice-versa. The set supports element
2846 * removal, which removes the corresponding mapping from the map,
2847 * via the {@code Iterator.remove}, {@code Set.remove},
2848 * {@code removeAll}, {@code retainAll}, and {@code clear}
2849 * operations. It does not support the {@code add} or
2850 * {@code addAll} operations.
2851 *
2852 * <p>The view's {@code iterator} is a "weakly consistent" iterator
2853 * that will never throw {@link ConcurrentModificationException},
2854 * and guarantees to traverse elements as they existed upon
2855 * construction of the iterator, and may (but is not guaranteed to)
2856 * reflect any modifications subsequent to construction.
2857 */
2858 public Set<Map.Entry<K,V>> entrySet() {
2859 EntrySetView<K,V> es = entrySet;
2860 return (es != null) ? es : (entrySet = new EntrySetView<K,V>(this));
2861 }
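    /*
     * Traversal sketch (hypothetical names): the weakly consistent view can
     * be iterated while other threads update the map, without risk of
     * ConcurrentModificationException.
     *
     *   for (Map.Entry<String, Integer> e : map.entrySet())
     *       if (e.getValue().intValue() < 0)
     *           map.remove(e.getKey(), e.getValue());
     */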
2862
2863 /**
2864 * Returns an enumeration of the keys in this table.
2865 *
2866 * @return an enumeration of the keys in this table
2867 * @see #keySet()
2868 */
2869 public Enumeration<K> keys() {
2870 return new KeyIterator<K,V>(this);
2871 }
2872
2873 /**
2874 * Returns an enumeration of the values in this table.
2875 *
2876 * @return an enumeration of the values in this table
2877 * @see #values()
2878 */
2879 public Enumeration<V> elements() {
2880 return new ValueIterator<K,V>(this);
2881 }
2882
2883 /**
2884 * Returns the hash code value for this {@link Map}, i.e.,
2885 * the sum of, for each key-value pair in the map,
2886 * {@code key.hashCode() ^ value.hashCode()}.
2887 *
2888 * @return the hash code value for this map
2889 */
2890 public int hashCode() {
2891 int h = 0;
2892 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2893 V v;
2894 while ((v = it.advance()) != null) {
2895 h += it.nextKey.hashCode() ^ v.hashCode();
2896 }
2897 return h;
2898 }
2899
2900 /**
2901 * Returns a string representation of this map. The string
2902 * representation consists of a list of key-value mappings (in no
2903 * particular order) enclosed in braces ("{@code {}}"). Adjacent
2904 * mappings are separated by the characters {@code ", "} (comma
2905 * and space). Each key-value mapping is rendered as the key
2906 * followed by an equals sign ("{@code =}") followed by the
2907 * associated value.
2908 *
2909 * @return a string representation of this map
2910 */
2911 public String toString() {
2912 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2913 StringBuilder sb = new StringBuilder();
2914 sb.append('{');
2915 V v;
2916 if ((v = it.advance()) != null) {
2917 for (;;) {
2918 Object k = it.nextKey;
2919 sb.append(k == this ? "(this Map)" : k);
2920 sb.append('=');
2921 sb.append(v == this ? "(this Map)" : v);
2922 if ((v = it.advance()) == null)
2923 break;
2924 sb.append(',').append(' ');
2925 }
2926 }
2927 return sb.append('}').toString();
2928 }
2929
2930 /**
2931 * Compares the specified object with this map for equality.
2932 * Returns {@code true} if the given object is a map with the same
2933 * mappings as this map. This operation may return misleading
2934 * results if either map is concurrently modified during execution
2935 * of this method.
2936 *
2937 * @param o object to be compared for equality with this map
2938 * @return {@code true} if the specified object is equal to this map
2939 */
2940 public boolean equals(Object o) {
2941 if (o != this) {
2942 if (!(o instanceof Map))
2943 return false;
2944 Map<?,?> m = (Map<?,?>) o;
2945 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
2946 V val;
2947 while ((val = it.advance()) != null) {
2948 Object v = m.get(it.nextKey);
2949 if (v == null || (v != val && !v.equals(val)))
2950 return false;
2951 }
2952 for (Map.Entry<?,?> e : m.entrySet()) {
2953 Object mk, mv, v;
2954 if ((mk = e.getKey()) == null ||
2955 (mv = e.getValue()) == null ||
2956 (v = internalGet(mk)) == null ||
2957 (mv != v && !mv.equals(v)))
2958 return false;
2959 }
2960 }
2961 return true;
2962 }
2963
2964     /* ---------------- Iterators -------------- */
2965
2966 @SuppressWarnings("serial") static final class KeyIterator<K,V>
2967 extends Traverser<K,V,Object>
2968 implements Spliterator<K>, Iterator<K>, Enumeration<K> {
2969 KeyIterator(ConcurrentHashMap<K, V> map) { super(map); }
2970 KeyIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
2971 super(map, it);
2972 }
2973 public KeyIterator<K,V> split() {
2974 if (nextKey != null)
2975 throw new IllegalStateException();
2976 return new KeyIterator<K,V>(map, this);
2977 }
2978 @SuppressWarnings("unchecked") public final K next() {
2979 if (nextVal == null && advance() == null)
2980 throw new NoSuchElementException();
2981 Object k = nextKey;
2982 nextVal = null;
2983 return (K) k;
2984 }
2985
2986 public final K nextElement() { return next(); }
2987
2988 public Iterator<K> iterator() { return this; }
2989
2990 public void forEach(Block<? super K> action) {
2991 if (action == null) throw new NullPointerException();
2992 while (advance() != null)
2993 action.accept((K)nextKey);
2994 }
2995 }
2996
2997 @SuppressWarnings("serial") static final class ValueIterator<K,V>
2998 extends Traverser<K,V,Object>
2999 implements Spliterator<V>, Iterator<V>, Enumeration<V> {
3000 ValueIterator(ConcurrentHashMap<K, V> map) { super(map); }
3001 ValueIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3002 super(map, it);
3003 }
3004 public ValueIterator<K,V> split() {
3005 if (nextKey != null)
3006 throw new IllegalStateException();
3007 return new ValueIterator<K,V>(map, this);
3008 }
3009
3010 public final V next() {
3011 V v;
3012 if ((v = nextVal) == null && (v = advance()) == null)
3013 throw new NoSuchElementException();
3014 nextVal = null;
3015 return v;
3016 }
3017
3018 public final V nextElement() { return next(); }
3019
3020 public Iterator<V> iterator() { return this; }
3021
3022 public void forEach(Block<? super V> action) {
3023 if (action == null) throw new NullPointerException();
3024 V v;
3025 while ((v = advance()) != null)
3026 action.accept(v);
3027 }
3028 }
3029
3030 @SuppressWarnings("serial") static final class EntryIterator<K,V>
3031 extends Traverser<K,V,Object>
3032 implements Spliterator<Map.Entry<K,V>>, Iterator<Map.Entry<K,V>> {
3033 EntryIterator(ConcurrentHashMap<K, V> map) { super(map); }
3034 EntryIterator(ConcurrentHashMap<K, V> map, Traverser<K,V,Object> it) {
3035 super(map, it);
3036 }
3037 public EntryIterator<K,V> split() {
3038 if (nextKey != null)
3039 throw new IllegalStateException();
3040 return new EntryIterator<K,V>(map, this);
3041 }
3042
3043 @SuppressWarnings("unchecked") public final Map.Entry<K,V> next() {
3044 V v;
3045 if ((v = nextVal) == null && (v = advance()) == null)
3046 throw new NoSuchElementException();
3047 Object k = nextKey;
3048 nextVal = null;
3049 return new MapEntry<K,V>((K)k, v, map);
3050 }
3051
3052 public Iterator<Map.Entry<K,V>> iterator() { return this; }
3053
3054 public void forEach(Block<? super Map.Entry<K,V>> action) {
3055 if (action == null) throw new NullPointerException();
3056 V v;
3057 while ((v = advance()) != null)
3058 action.accept(entryFor((K)nextKey, v));
3059 }
3060 }
3061
3062 /**
3063 * Exported Entry for iterators
3064 */
3065 static final class MapEntry<K,V> implements Map.Entry<K, V> {
3066 final K key; // non-null
3067 V val; // non-null
3068 final ConcurrentHashMap<K, V> map;
3069 MapEntry(K key, V val, ConcurrentHashMap<K, V> map) {
3070 this.key = key;
3071 this.val = val;
3072 this.map = map;
3073 }
3074 public final K getKey() { return key; }
3075 public final V getValue() { return val; }
3076 public final int hashCode() { return key.hashCode() ^ val.hashCode(); }
3077 public final String toString(){ return key + "=" + val; }
3078
3079 public final boolean equals(Object o) {
3080 Object k, v; Map.Entry<?,?> e;
3081 return ((o instanceof Map.Entry) &&
3082 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
3083 (v = e.getValue()) != null &&
3084 (k == key || k.equals(key)) &&
3085 (v == val || v.equals(val)));
3086 }
3087
3088 /**
3089 * Sets our entry's value and writes through to the map. The
3090 * value to return is somewhat arbitrary here. Since we do not
3091 * necessarily track asynchronous changes, the most recent
3092 * "previous" value could be different from what we return (or
3093         * could even have been removed, in which case the put will
3094         * re-establish it). We do not and cannot guarantee more.
3095 */
3096 public final V setValue(V value) {
3097 if (value == null) throw new NullPointerException();
3098 V v = val;
3099 val = value;
3100 map.put(key, value);
3101 return v;
3102 }
3103 }
3104
3105 /**
3106 * Returns exportable snapshot entry for the given key and value
3107 * when write-through can't or shouldn't be used.
3108 */
3109 static <K,V> AbstractMap.SimpleEntry<K,V> entryFor(K k, V v) {
3110 return new AbstractMap.SimpleEntry<K,V>(k, v);
3111 }
3112
3113 /* ---------------- Serialization Support -------------- */
3114
3115 /**
3116 * Stripped-down version of helper class used in previous version,
3117 * declared for the sake of serialization compatibility
3118 */
3119 static class Segment<K,V> implements Serializable {
3120 private static final long serialVersionUID = 2249069246763182397L;
3121 final float loadFactor;
3122 Segment(float lf) { this.loadFactor = lf; }
3123 }
3124
3125 /**
3126 * Saves the state of the {@code ConcurrentHashMap} instance to a
3127 * stream (i.e., serializes it).
3128 * @param s the stream
3129 * @serialData
3130 * the key (Object) and value (Object)
3131 * for each key-value mapping, followed by a null pair.
3132 * The key-value mappings are emitted in no particular order.
3133 */
3134 @SuppressWarnings("unchecked") private void writeObject
3135 (java.io.ObjectOutputStream s)
3136 throws java.io.IOException {
3137 if (segments == null) { // for serialization compatibility
3138 segments = (Segment<K,V>[])
3139 new Segment<?,?>[DEFAULT_CONCURRENCY_LEVEL];
3140 for (int i = 0; i < segments.length; ++i)
3141 segments[i] = new Segment<K,V>(LOAD_FACTOR);
3142 }
3143 s.defaultWriteObject();
3144 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3145 V v;
3146 while ((v = it.advance()) != null) {
3147 s.writeObject(it.nextKey);
3148 s.writeObject(v);
3149 }
3150 s.writeObject(null);
3151 s.writeObject(null);
3152 segments = null; // throw away
3153 }
3154
3155 /**
3156 * Reconstitutes the instance from a stream (that is, deserializes it).
3157 * @param s the stream
3158 */
3159 @SuppressWarnings("unchecked") private void readObject
3160 (java.io.ObjectInputStream s)
3161 throws java.io.IOException, ClassNotFoundException {
3162 s.defaultReadObject();
3163 this.segments = null; // unneeded
3164
3165 // Create all nodes, then place in table once size is known
3166 long size = 0L;
3167 Node<V> p = null;
3168 for (;;) {
3169 K k = (K) s.readObject();
3170 V v = (V) s.readObject();
3171 if (k != null && v != null) {
3172 int h = spread(k.hashCode());
3173 p = new Node<V>(h, k, v, p);
3174 ++size;
3175 }
3176 else
3177 break;
3178 }
3179 if (p != null) {
3180 boolean init = false;
3181 int n;
3182 if (size >= (long)(MAXIMUM_CAPACITY >>> 1))
3183 n = MAXIMUM_CAPACITY;
3184 else {
3185 int sz = (int)size;
3186 n = tableSizeFor(sz + (sz >>> 1) + 1);
3187 }
3188 int sc = sizeCtl;
3189 boolean collide = false;
3190 if (n > sc &&
3191 U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
3192 try {
3193 if (table == null) {
3194 init = true;
3195 @SuppressWarnings("rawtypes") Node[] rt = new Node[n];
3196 Node<V>[] tab = (Node<V>[])rt;
3197 int mask = n - 1;
3198 while (p != null) {
3199 int j = p.hash & mask;
3200 Node<V> next = p.next;
3201 Node<V> q = p.next = tabAt(tab, j);
3202 setTabAt(tab, j, p);
3203 if (!collide && q != null && q.hash == p.hash)
3204 collide = true;
3205 p = next;
3206 }
3207 table = tab;
3208 addCount(size, -1);
3209 sc = n - (n >>> 2);
3210 }
3211 } finally {
3212 sizeCtl = sc;
3213 }
3214 if (collide) { // rescan and convert to TreeBins
3215 Node<V>[] tab = table;
3216 for (int i = 0; i < tab.length; ++i) {
3217 int c = 0;
3218 for (Node<V> e = tabAt(tab, i); e != null; e = e.next) {
3219 if (++c > TREE_THRESHOLD &&
3220 (e.key instanceof Comparable)) {
3221 replaceWithTreeBin(tab, i, e.key);
3222 break;
3223 }
3224 }
3225 }
3226 }
3227 }
3228 if (!init) { // Can only happen if unsafely published.
3229 while (p != null) {
3230 internalPut((K)p.key, p.val, false);
3231 p = p.next;
3232 }
3233 }
3234 }
3235 }
3236
3237 // -------------------------------------------------------
3238
3239 // Sequential bulk operations
3240
3241 /**
3242 * Performs the given action for each (key, value).
3243 *
3244 * @param action the action
3245 */
3246 @SuppressWarnings("unchecked") public void forEachSequentially
3247 (BiBlock<? super K, ? super V> action) {
3248 if (action == null) throw new NullPointerException();
3249 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3250 V v;
3251 while ((v = it.advance()) != null)
3252 action.accept((K)it.nextKey, v);
3253 }
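    /*
     * Usage sketch (hypothetical "map" instance; illustrative lambda syntax
     * targeting the BiBlock parameter type used above):
     *
     *   map.forEachSequentially((k, v) -> System.out.println(k + "=" + v));
     */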
3254
3255 /**
3256 * Performs the given action for each non-null transformation
3257 * of each (key, value).
3258 *
3259 * @param transformer a function returning the transformation
3260      * for an element, or null if there is no transformation (in
3261 * which case the action is not applied).
3262 * @param action the action
3263 */
3264 @SuppressWarnings("unchecked") public <U> void forEachSequentially
3265 (BiFunction<? super K, ? super V, ? extends U> transformer,
3266 Block<? super U> action) {
3267 if (transformer == null || action == null)
3268 throw new NullPointerException();
3269 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3270 V v; U u;
3271 while ((v = it.advance()) != null) {
3272 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3273 action.accept(u);
3274 }
3275 }
3276
3277 /**
3278 * Returns a non-null result from applying the given search
3279 * function on each (key, value), or null if none.
3280 *
3281 * @param searchFunction a function returning a non-null
3282 * result on success, else null
3283 * @return a non-null result from applying the given search
3284 * function on each (key, value), or null if none
3285 */
3286 @SuppressWarnings("unchecked") public <U> U searchSequentially
3287 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3288 if (searchFunction == null) throw new NullPointerException();
3289 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3290 V v; U u;
3291 while ((v = it.advance()) != null) {
3292 if ((u = searchFunction.apply((K)it.nextKey, v)) != null)
3293 return u;
3294 }
3295 return null;
3296 }
3297
3298 /**
3299 * Returns the result of accumulating the given transformation
3300 * of all (key, value) pairs using the given reducer to
3301 * combine values, or null if none.
3302 *
3303 * @param transformer a function returning the transformation
3304      * for an element, or null if there is no transformation (in
3305 * which case it is not combined).
3306 * @param reducer a commutative associative combining function
3307 * @return the result of accumulating the given transformation
3308 * of all (key, value) pairs
3309 */
3310 @SuppressWarnings("unchecked") public <U> U reduceSequentially
3311 (BiFunction<? super K, ? super V, ? extends U> transformer,
3312 BiFunction<? super U, ? super U, ? extends U> reducer) {
3313 if (transformer == null || reducer == null)
3314 throw new NullPointerException();
3315 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3316 U r = null, u; V v;
3317 while ((v = it.advance()) != null) {
3318 if ((u = transformer.apply((K)it.nextKey, v)) != null)
3319 r = (r == null) ? u : reducer.apply(r, u);
3320 }
3321 return r;
3322 }
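    /*
     * Usage sketch (hypothetical "map" instance): concatenates "key=value"
     * renderings of all entries, or yields null for an empty map.
     *
     *   String joined = map.reduceSequentially(
     *       (k, v) -> k + "=" + v,
     *       (a, b) -> a + ", " + b);
     */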
3323
3324 /**
3325 * Returns the result of accumulating the given transformation
3326 * of all (key, value) pairs using the given reducer to
3327 * combine values, and the given basis as an identity value.
3328 *
3329 * @param transformer a function returning the transformation
3330 * for an element
3331 * @param basis the identity (initial default value) for the reduction
3332 * @param reducer a commutative associative combining function
3333 * @return the result of accumulating the given transformation
3334 * of all (key, value) pairs
3335 */
3336 @SuppressWarnings("unchecked") public double reduceToDoubleSequentially
3337 (DoubleBiFunction<? super K, ? super V> transformer,
3338 double basis,
3339 DoubleBinaryOperator reducer) {
3340 if (transformer == null || reducer == null)
3341 throw new NullPointerException();
3342 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3343 double r = basis; V v;
3344 while ((v = it.advance()) != null)
3345 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey, v));
3346 return r;
3347 }
3348
3349 /**
3350 * Returns the result of accumulating the given transformation
3351 * of all (key, value) pairs using the given reducer to
3352 * combine values, and the given basis as an identity value.
3353 *
3354 * @param transformer a function returning the transformation
3355 * for an element
3356 * @param basis the identity (initial default value) for the reduction
3357 * @param reducer a commutative associative combining function
3358 * @return the result of accumulating the given transformation
3359 * of all (key, value) pairs
3360 */
3361 @SuppressWarnings("unchecked") public long reduceToLongSequentially
3362 (LongBiFunction<? super K, ? super V> transformer,
3363 long basis,
3364 LongBinaryOperator reducer) {
3365 if (transformer == null || reducer == null)
3366 throw new NullPointerException();
3367 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3368 long r = basis; V v;
3369 while ((v = it.advance()) != null)
3370 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey, v));
3371 return r;
3372 }
3373
3374 /**
3375 * Returns the result of accumulating the given transformation
3376 * of all (key, value) pairs using the given reducer to
3377 * combine values, and the given basis as an identity value.
3378 *
3379 * @param transformer a function returning the transformation
3380 * for an element
3381 * @param basis the identity (initial default value) for the reduction
3382 * @param reducer a commutative associative combining function
3383 * @return the result of accumulating the given transformation
3384 * of all (key, value) pairs
3385 */
3386 @SuppressWarnings("unchecked") public int reduceToIntSequentially
3387 (IntBiFunction<? super K, ? super V> transformer,
3388 int basis,
3389 IntBinaryOperator reducer) {
3390 if (transformer == null || reducer == null)
3391 throw new NullPointerException();
3392 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3393 int r = basis; V v;
3394 while ((v = it.advance()) != null)
3395 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey, v));
3396 return r;
3397 }
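    /*
     * Usage sketch for the primitive-basis reductions above (illustrative;
     * assumes a ConcurrentHashMap<String,Long> "map"): summing values into a
     * long without boxing the running total, starting from the identity 0L.
     *
     *   long sum = map.reduceToLongSequentially(
     *       (k, v) -> v,            // per-pair long transformation
     *       0L,                     // basis
     *       (a, b) -> a + b);       // reducer
     */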
3398
3399 /**
3400 * Performs the given action for each key.
3401 *
3402 * @param action the action
3403 */
3404 @SuppressWarnings("unchecked") public void forEachKeySequentially
3405 (Block<? super K> action) {
3406 if (action == null) throw new NullPointerException();
3407 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3408 while (it.advance() != null)
3409 action.accept((K)it.nextKey);
3410 }
3411
3412 /**
3413 * Performs the given action for each non-null transformation
3414 * of each key.
3415 *
3416 * @param transformer a function returning the transformation
3417 * for an element, or null of there is no transformation (in
3418 * for an element, or null if there is no transformation (in
3419 * @param action the action
3420 */
3421 @SuppressWarnings("unchecked") public <U> void forEachKeySequentially
3422 (Function<? super K, ? extends U> transformer,
3423 Block<? super U> action) {
3424 if (transformer == null || action == null)
3425 throw new NullPointerException();
3426 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3427 U u;
3428 while (it.advance() != null) {
3429 if ((u = transformer.apply((K)it.nextKey)) != null)
3430 action.accept(u);
3431 }
3434 }
3435
3436 /**
3437 * Returns a non-null result from applying the given search
3438 * function on each key, or null if none.
3439 *
3440 * @param searchFunction a function returning a non-null
3441 * result on success, else null
3442 * @return a non-null result from applying the given search
3443 * function on each key, or null if none
3444 */
3445 @SuppressWarnings("unchecked") public <U> U searchKeysSequentially
3446 (Function<? super K, ? extends U> searchFunction) {
     if (searchFunction == null) throw new NullPointerException();
3447 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3448 U u;
3449 while (it.advance() != null) {
3450 if ((u = searchFunction.apply((K)it.nextKey)) != null)
3451 return u;
3452 }
3453 return null;
3454 }
3455
3456 /**
3457 * Returns the result of accumulating all keys using the given
3458 * reducer to combine values, or null if none.
3459 *
3460 * @param reducer a commutative associative combining function
3461 * @return the result of accumulating all keys using the given
3462 * reducer to combine values, or null if none
3463 */
3464 @SuppressWarnings("unchecked") public K reduceKeysSequentially
3465 (BiFunction<? super K, ? super K, ? extends K> reducer) {
3466 if (reducer == null) throw new NullPointerException();
3467 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3468 K r = null;
3469 while (it.advance() != null) {
3470 K u = (K)it.nextKey;
3471 r = (r == null) ? u : reducer.apply(r, u);
3472 }
3473 return r;
3474 }
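    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * the lexicographically smallest key, or null if the map is empty.
     *
     *   String firstKey = map.reduceKeysSequentially(
     *       (a, b) -> a.compareTo(b) <= 0 ? a : b);
     */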
3475
3476 /**
3477 * Returns the result of accumulating the given transformation
3478 * of all keys using the given reducer to combine values, or
3479 * null if none.
3480 *
3481 * @param transformer a function returning the transformation
3482 * for an element, or null of there is no transformation (in
3483 * for an element, or null if there is no transformation (in
3484 * @param reducer a commutative associative combining function
3485 * @return the result of accumulating the given transformation
3486 * of all keys
3487 */
3488 @SuppressWarnings("unchecked") public <U> U reduceKeysSequentially
3489 (Function<? super K, ? extends U> transformer,
3490 BiFunction<? super U, ? super U, ? extends U> reducer) {
3491 if (transformer == null || reducer == null)
3492 throw new NullPointerException();
3493 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3494 U r = null, u;
3495 while (it.advance() != null) {
3496 if ((u = transformer.apply((K)it.nextKey)) != null)
3497 r = (r == null) ? u : reducer.apply(r, u);
3498 }
3499 return r;
3500 }
3501
3502 /**
3503 * Returns the result of accumulating the given transformation
3504 * of all keys using the given reducer to combine values, and
3505 * the given basis as an identity value.
3506 *
3507 * @param transformer a function returning the transformation
3508 * for an element
3509 * @param basis the identity (initial default value) for the reduction
3510 * @param reducer a commutative associative combining function
3511 * @return the result of accumulating the given transformation
3512 * of all keys
3513 */
3514 @SuppressWarnings("unchecked") public double reduceKeysToDoubleSequentially
3515 (DoubleFunction<? super K> transformer,
3516 double basis,
3517 DoubleBinaryOperator reducer) {
3518 if (transformer == null || reducer == null)
3519 throw new NullPointerException();
3520 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3521 double r = basis;
3522 while (it.advance() != null)
3523 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)it.nextKey));
3524 return r;
3525 }
3526
3527 /**
3528 * Returns the result of accumulating the given transformation
3529 * of all keys using the given reducer to combine values, and
3530 * the given basis as an identity value.
3531 *
3532 * @param transformer a function returning the transformation
3533 * for an element
3534 * @param basis the identity (initial default value) for the reduction
3535 * @param reducer a commutative associative combining function
3536 * @return the result of accumulating the given transformation
3537 * of all keys
3538 */
3539 @SuppressWarnings("unchecked") public long reduceKeysToLongSequentially
3540 (LongFunction<? super K> transformer,
3541 long basis,
3542 LongBinaryOperator reducer) {
3543 if (transformer == null || reducer == null)
3544 throw new NullPointerException();
3545 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3546 long r = basis;
3547 while (it.advance() != null)
3548 r = reducer.applyAsLong(r, transformer.applyAsLong((K)it.nextKey));
3549 return r;
3550 }
3551
3552 /**
3553 * Returns the result of accumulating the given transformation
3554 * of all keys using the given reducer to combine values, and
3555 * the given basis as an identity value.
3556 *
3557 * @param transformer a function returning the transformation
3558 * for an element
3559 * @param basis the identity (initial default value) for the reduction
3560 * @param reducer a commutative associative combining function
3561 * @return the result of accumulating the given transformation
3562 * of all keys
3563 */
3564 @SuppressWarnings("unchecked") public int reduceKeysToIntSequentially
3565 (IntFunction<? super K> transformer,
3566 int basis,
3567 IntBinaryOperator reducer) {
3568 if (transformer == null || reducer == null)
3569 throw new NullPointerException();
3570 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3571 int r = basis;
3572 while (it.advance() != null)
3573 r = reducer.applyAsInt(r, transformer.applyAsInt((K)it.nextKey));
3574 return r;
3575 }
3576
3577 /**
3578 * Performs the given action for each value.
3579 *
3580 * @param action the action
3581 */
3582 public void forEachValueSequentially(Block<? super V> action) {
3583 if (action == null) throw new NullPointerException();
3584 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3585 V v;
3586 while ((v = it.advance()) != null)
3587 action.accept(v);
3588 }
3589
3590 /**
3591 * Performs the given action for each non-null transformation
3592 * of each value.
3593 *
3594 * @param transformer a function returning the transformation
3595 * for an element, or null if there is no transformation (in
3596 * which case the action is not applied).
     * @param action the action
3597 */
3598 public <U> void forEachValueSequentially
3599 (Function<? super V, ? extends U> transformer,
3600 Block<? super U> action) {
3601 if (transformer == null || action == null)
3602 throw new NullPointerException();
3603 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3604 V v; U u;
3605 while ((v = it.advance()) != null) {
3606 if ((u = transformer.apply(v)) != null)
3607 action.accept(u);
3608 }
3609 }
3610
3611 /**
3612 * Returns a non-null result from applying the given search
3613 * function on each value, or null if none.
3614 *
3615 * @param searchFunction a function returning a non-null
3616 * result on success, else null
3617 * @return a non-null result from applying the given search
3618 * function on each value, or null if none
3619 *
3620 */
3621 public <U> U searchValuesSequentially
3622 (Function<? super V, ? extends U> searchFunction) {
3623 if (searchFunction == null) throw new NullPointerException();
3624 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3625 V v; U u;
3626 while ((v = it.advance()) != null) {
3627 if ((u = searchFunction.apply(v)) != null)
3628 return u;
3629 }
3630 return null;
3631 }
3632
3633 /**
3634 * Returns the result of accumulating all values using the
3635 * given reducer to combine values, or null if none.
3636 *
3637 * @param reducer a commutative associative combining function
3638 * @return the result of accumulating all values
3639 */
3640 public V reduceValuesSequentially
3641 (BiFunction<? super V, ? super V, ? extends V> reducer) {
3642 if (reducer == null) throw new NullPointerException();
3643 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3644 V r = null; V v;
3645 while ((v = it.advance()) != null)
3646 r = (r == null) ? v : reducer.apply(r, v);
3647 return r;
3648 }
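    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * the largest value, or null if the map is empty.
     *
     *   Long maxValue = map.reduceValuesSequentially(
     *       (a, b) -> a >= b ? a : b);
     */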
3649
3650 /**
3651 * Returns the result of accumulating the given transformation
3652 * of all values using the given reducer to combine values, or
3653 * null if none.
3654 *
3655 * @param transformer a function returning the transformation
3656 * for an element, or null of there is no transformation (in
3657 * for an element, or null if there is no transformation (in
3658 * @param reducer a commutative associative combining function
3659 * @return the result of accumulating the given transformation
3660 * of all values
3661 */
3662 public <U> U reduceValuesSequentially
3663 (Function<? super V, ? extends U> transformer,
3664 BiFunction<? super U, ? super U, ? extends U> reducer) {
3665 if (transformer == null || reducer == null)
3666 throw new NullPointerException();
3667 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3668 U r = null, u; V v;
3669 while ((v = it.advance()) != null) {
3670 if ((u = transformer.apply(v)) != null)
3671 r = (r == null) ? u : reducer.apply(r, u);
3672 }
3673 return r;
3674 }
3675
3676 /**
3677 * Returns the result of accumulating the given transformation
3678 * of all values using the given reducer to combine values,
3679 * and the given basis as an identity value.
3680 *
3681 * @param transformer a function returning the transformation
3682 * for an element
3683 * @param basis the identity (initial default value) for the reduction
3684 * @param reducer a commutative associative combining function
3685 * @return the result of accumulating the given transformation
3686 * of all values
3687 */
3688 public double reduceValuesToDoubleSequentially
3689 (DoubleFunction<? super V> transformer,
3690 double basis,
3691 DoubleBinaryOperator reducer) {
3692 if (transformer == null || reducer == null)
3693 throw new NullPointerException();
3694 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3695 double r = basis; V v;
3696 while ((v = it.advance()) != null)
3697 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
3698 return r;
3699 }
3700
3701 /**
3702 * Returns the result of accumulating the given transformation
3703 * of all values using the given reducer to combine values,
3704 * and the given basis as an identity value.
3705 *
3706 * @param transformer a function returning the transformation
3707 * for an element
3708 * @param basis the identity (initial default value) for the reduction
3709 * @param reducer a commutative associative combining function
3710 * @return the result of accumulating the given transformation
3711 * of all values
3712 */
3713 public long reduceValuesToLongSequentially
3714 (LongFunction<? super V> transformer,
3715 long basis,
3716 LongBinaryOperator reducer) {
3717 if (transformer == null || reducer == null)
3718 throw new NullPointerException();
3719 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3720 long r = basis; V v;
3721 while ((v = it.advance()) != null)
3722 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
3723 return r;
3724 }
3725
3726 /**
3727 * Returns the result of accumulating the given transformation
3728 * of all values using the given reducer to combine values,
3729 * and the given basis as an identity value.
3730 *
3731 * @param transformer a function returning the transformation
3732 * for an element
3733 * @param basis the identity (initial default value) for the reduction
3734 * @param reducer a commutative associative combining function
3735 * @return the result of accumulating the given transformation
3736 * of all values
3737 */
3738 public int reduceValuesToIntSequentially
3739 (IntFunction<? super V> transformer,
3740 int basis,
3741 IntBinaryOperator reducer) {
3742 if (transformer == null || reducer == null)
3743 throw new NullPointerException();
3744 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3745 int r = basis; V v;
3746 while ((v = it.advance()) != null)
3747 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
3748 return r;
3749 }
3750
3751 /**
3752 * Performs the given action for each entry.
3753 *
3754 * @param action the action
3755 */
3756 @SuppressWarnings("unchecked") public void forEachEntrySequentially
3757 (Block<? super Map.Entry<K,V>> action) {
3758 if (action == null) throw new NullPointerException();
3759 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3760 V v;
3761 while ((v = it.advance()) != null)
3762 action.accept(entryFor((K)it.nextKey, v));
3763 }
3764
3765 /**
3766 * Performs the given action for each non-null transformation
3767 * of each entry.
3768 *
3769 * @param transformer a function returning the transformation
3770 * for an element, or null of there is no transformation (in
3771 * for an element, or null if there is no transformation (in
3772 * @param action the action
3773 */
3774 @SuppressWarnings("unchecked") public <U> void forEachEntrySequentially
3775 (Function<Map.Entry<K,V>, ? extends U> transformer,
3776 Block<? super U> action) {
3777 if (transformer == null || action == null)
3778 throw new NullPointerException();
3779 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3780 V v; U u;
3781 while ((v = it.advance()) != null) {
3782 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3783 action.accept(u);
3784 }
3785 }
3786
3787 /**
3788 * Returns a non-null result from applying the given search
3789 * function on each entry, or null if none.
3790 *
3791 * @param searchFunction a function returning a non-null
3792 * result on success, else null
3793 * @return a non-null result from applying the given search
3794 * function on each entry, or null if none
3795 */
3796 @SuppressWarnings("unchecked") public <U> U searchEntriesSequentially
3797 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
3798 if (searchFunction == null) throw new NullPointerException();
3799 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3800 V v; U u;
3801 while ((v = it.advance()) != null) {
3802 if ((u = searchFunction.apply(entryFor((K)it.nextKey, v))) != null)
3803 return u;
3804 }
3805 return null;
3806 }
3807
3808 /**
3809 * Returns the result of accumulating all entries using the
3810 * given reducer to combine values, or null if none.
3811 *
3812 * @param reducer a commutative associative combining function
3813 * @return the result of accumulating all entries
3814 */
3815 @SuppressWarnings("unchecked") public Map.Entry<K,V> reduceEntriesSequentially
3816 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
3817 if (reducer == null) throw new NullPointerException();
3818 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3819 Map.Entry<K,V> r = null; V v;
3820 while ((v = it.advance()) != null) {
3821 Map.Entry<K,V> u = entryFor((K)it.nextKey, v);
3822 r = (r == null) ? u : reducer.apply(r, u);
3823 }
3824 return r;
3825 }
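    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * the entry with the largest value, or null if the map is empty.
     *
     *   Map.Entry<String,Long> top = map.reduceEntriesSequentially(
     *       (a, b) -> a.getValue() >= b.getValue() ? a : b);
     */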
3826
3827 /**
3828 * Returns the result of accumulating the given transformation
3829 * of all entries using the given reducer to combine values,
3830 * or null if none.
3831 *
3832 * @param transformer a function returning the transformation
3833 * for an element, or null of there is no transformation (in
3834 * for an element, or null if there is no transformation (in
3835 * @param reducer a commutative associative combining function
3836 * @return the result of accumulating the given transformation
3837 * of all entries
3838 */
3839 @SuppressWarnings("unchecked") public <U> U reduceEntriesSequentially
3840 (Function<Map.Entry<K,V>, ? extends U> transformer,
3841 BiFunction<? super U, ? super U, ? extends U> reducer) {
3842 if (transformer == null || reducer == null)
3843 throw new NullPointerException();
3844 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3845 U r = null, u; V v;
3846 while ((v = it.advance()) != null) {
3847 if ((u = transformer.apply(entryFor((K)it.nextKey, v))) != null)
3848 r = (r == null) ? u : reducer.apply(r, u);
3849 }
3850 return r;
3851 }
3852
3853 /**
3854 * Returns the result of accumulating the given transformation
3855 * of all entries using the given reducer to combine values,
3856 * and the given basis as an identity value.
3857 *
3858 * @param transformer a function returning the transformation
3859 * for an element
3860 * @param basis the identity (initial default value) for the reduction
3861 * @param reducer a commutative associative combining function
3862 * @return the result of accumulating the given transformation
3863 * of all entries
3864 */
3865 @SuppressWarnings("unchecked") public double reduceEntriesToDoubleSequentially
3866 (DoubleFunction<Map.Entry<K,V>> transformer,
3867 double basis,
3868 DoubleBinaryOperator reducer) {
3869 if (transformer == null || reducer == null)
3870 throw new NullPointerException();
3871 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3872 double r = basis; V v;
3873 while ((v = it.advance()) != null)
3874 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)it.nextKey, v)));
3875 return r;
3876 }
3877
3878 /**
3879 * Returns the result of accumulating the given transformation
3880 * of all entries using the given reducer to combine values,
3881 * and the given basis as an identity value.
3882 *
3883 * @param transformer a function returning the transformation
3884 * for an element
3885 * @param basis the identity (initial default value) for the reduction
3886 * @param reducer a commutative associative combining function
3887 * @return the result of accumulating the given transformation
3888 * of all entries
3889 */
3890 @SuppressWarnings("unchecked") public long reduceEntriesToLongSequentially
3891 (LongFunction<Map.Entry<K,V>> transformer,
3892 long basis,
3893 LongBinaryOperator reducer) {
3894 if (transformer == null || reducer == null)
3895 throw new NullPointerException();
3896 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3897 long r = basis; V v;
3898 while ((v = it.advance()) != null)
3899 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)it.nextKey, v)));
3900 return r;
3901 }
3902
3903 /**
3904 * Returns the result of accumulating the given transformation
3905 * of all entries using the given reducer to combine values,
3906 * and the given basis as an identity value.
3907 *
3908 * @param transformer a function returning the transformation
3909 * for an element
3910 * @param basis the identity (initial default value) for the reduction
3911 * @param reducer a commutative associative combining function
3912 * @return the result of accumulating the given transformation
3913 * of all entries
3914 */
3915 @SuppressWarnings("unchecked") public int reduceEntriesToIntSequentially
3916 (IntFunction<Map.Entry<K,V>> transformer,
3917 int basis,
3918 IntBinaryOperator reducer) {
3919 if (transformer == null || reducer == null)
3920 throw new NullPointerException();
3921 Traverser<K,V,Object> it = new Traverser<K,V,Object>(this);
3922 int r = basis; V v;
3923 while ((v = it.advance()) != null)
3924 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)it.nextKey, v)));
3925 return r;
3926 }
3927
3928 // Parallel bulk operations
3929
3930 /**
3931 * Performs the given action for each (key, value).
3932 *
3933 * @param action the action
3934 */
3935 public void forEachInParallel(BiBlock<? super K,? super V> action) {
3936 ForkJoinTasks.forEach
3937 (this, action).invoke();
3938 }
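    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * the action may run concurrently in ForkJoin worker threads, so it should
     * not rely on encounter order or unsynchronized shared state.
     *
     *   map.forEachInParallel((k, v) -> System.out.println(k + " -> " + v));
     */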
3939
3940 /**
3941 * Performs the given action for each non-null transformation
3942 * of each (key, value).
3943 *
3944 * @param transformer a function returning the transformation
3945 * for an element, or null of there is no transformation (in
3946 * for an element, or null if there is no transformation (in
3947 * @param action the action
3948 */
3949 public <U> void forEachInParallel
3950 (BiFunction<? super K, ? super V, ? extends U> transformer,
3951 Block<? super U> action) {
3952 ForkJoinTasks.forEach
3953 (this, transformer, action).invoke();
3954 }
3955
3956 /**
3957 * Returns a non-null result from applying the given search
3958 * function on each (key, value), or null if none. Upon
3959 * success, further element processing is suppressed and the
3960 * results of any other parallel invocations of the search
3961 * function are ignored.
3962 *
3963 * @param searchFunction a function returning a non-null
3964 * result on success, else null
3965 * @return a non-null result from applying the given search
3966 * function on each (key, value), or null if none
3967 */
3968 public <U> U searchInParallel
3969 (BiFunction<? super K, ? super V, ? extends U> searchFunction) {
3970 return ForkJoinTasks.search
3971 (this, searchFunction).invoke();
3972 }
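    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * because the scan runs in parallel, any one of several matching keys may be
     * returned.
     *
     *   String anyLargeKey = map.searchInParallel(
     *       (k, v) -> v > 100L ? k : null);
     */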
3973
3974 /**
3975 * Returns the result of accumulating the given transformation
3976 * of all (key, value) pairs using the given reducer to
3977 * combine values, or null if none.
3978 *
3979 * @param transformer a function returning the transformation
3980 * for an element, or null of there is no transformation (in
3981 * for an element, or null if there is no transformation (in
3982 * @param reducer a commutative associative combining function
3983 * @return the result of accumulating the given transformation
3984 * of all (key, value) pairs
3985 */
3986 public <U> U reduceInParallel
3987 (BiFunction<? super K, ? super V, ? extends U> transformer,
3988 BiFunction<? super U, ? super U, ? extends U> reducer) {
3989 return ForkJoinTasks.reduce
3990 (this, transformer, reducer).invoke();
3991 }
3992
3993 /**
3994 * Returns the result of accumulating the given transformation
3995 * of all (key, value) pairs using the given reducer to
3996 * combine values, and the given basis as an identity value.
3997 *
3998 * @param transformer a function returning the transformation
3999 * for an element
4000 * @param basis the identity (initial default value) for the reduction
4001 * @param reducer a commutative associative combining function
4002 * @return the result of accumulating the given transformation
4003 * of all (key, value) pairs
4004 */
4005 public double reduceToDoubleInParallel
4006 (DoubleBiFunction<? super K, ? super V> transformer,
4007 double basis,
4008 DoubleBinaryOperator reducer) {
4009 return ForkJoinTasks.reduceToDouble
4010 (this, transformer, basis, reducer).invoke();
4011 }
4012
4013 /**
4014 * Returns the result of accumulating the given transformation
4015 * of all (key, value) pairs using the given reducer to
4016 * combine values, and the given basis as an identity value.
4017 *
4018 * @param transformer a function returning the transformation
4019 * for an element
4020 * @param basis the identity (initial default value) for the reduction
4021 * @param reducer a commutative associative combining function
4022 * @return the result of accumulating the given transformation
4023 * of all (key, value) pairs
4024 */
4025 public long reduceToLongInParallel
4026 (LongBiFunction<? super K, ? super V> transformer,
4027 long basis,
4028 LongBinaryOperator reducer) {
4029 return ForkJoinTasks.reduceToLong
4030 (this, transformer, basis, reducer).invoke();
4031 }
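    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * a parallel sum of all values; the reducer must be commutative and
     * associative because combination order is unspecified.
     *
     *   long total = map.reduceToLongInParallel(
     *       (k, v) -> v, 0L, (a, b) -> a + b);
     */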
4032
4033 /**
4034 * Returns the result of accumulating the given transformation
4035 * of all (key, value) pairs using the given reducer to
4036 * combine values, and the given basis as an identity value.
4037 *
4038 * @param transformer a function returning the transformation
4039 * for an element
4040 * @param basis the identity (initial default value) for the reduction
4041 * @param reducer a commutative associative combining function
4042 * @return the result of accumulating the given transformation
4043 * of all (key, value) pairs
4044 */
4045 public int reduceToIntInParallel
4046 (IntBiFunction<? super K, ? super V> transformer,
4047 int basis,
4048 IntBinaryOperator reducer) {
4049 return ForkJoinTasks.reduceToInt
4050 (this, transformer, basis, reducer).invoke();
4051 }
4052
4053 /**
4054 * Performs the given action for each key.
4055 *
4056 * @param action the action
4057 */
4058 public void forEachKeyInParallel(Block<? super K> action) {
4059 ForkJoinTasks.forEachKey
4060 (this, action).invoke();
4061 }
4062
4063 /**
4064 * Performs the given action for each non-null transformation
4065 * of each key.
4066 *
4067 * @param transformer a function returning the transformation
4068 * for an element, or null of there is no transformation (in
4069 * for an element, or null if there is no transformation (in
4070 * @param action the action
4071 */
4072 public <U> void forEachKeyInParallel
4073 (Function<? super K, ? extends U> transformer,
4074 Block<? super U> action) {
4075 ForkJoinTasks.forEachKey
4076 (this, transformer, action).invoke();
4077 }
4078
4079 /**
4080 * Returns a non-null result from applying the given search
4081 * function on each key, or null if none. Upon success,
4082 * further element processing is suppressed and the results of
4083 * any other parallel invocations of the search function are
4084 * ignored.
4085 *
4086 * @param searchFunction a function returning a non-null
4087 * result on success, else null
4088 * @return a non-null result from applying the given search
4089 * function on each key, or null if none
4090 */
4091 public <U> U searchKeysInParallel
4092 (Function<? super K, ? extends U> searchFunction) {
4093 return ForkJoinTasks.searchKeys
4094 (this, searchFunction).invoke();
4095 }
4096
4097 /**
4098 * Returns the result of accumulating all keys using the given
4099 * reducer to combine values, or null if none.
4100 *
4101 * @param reducer a commutative associative combining function
4102 * @return the result of accumulating all keys using the given
4103 * reducer to combine values, or null if none
4104 */
4105 public K reduceKeysInParallel
4106 (BiFunction<? super K, ? super K, ? extends K> reducer) {
4107 return ForkJoinTasks.reduceKeys
4108 (this, reducer).invoke();
4109 }
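    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * the lexicographically smallest key, computed in parallel.
     *
     *   String minKey = map.reduceKeysInParallel(
     *       (a, b) -> a.compareTo(b) <= 0 ? a : b);
     */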
4110
4111 /**
4112 * Returns the result of accumulating the given transformation
4113 * of all keys using the given reducer to combine values, or
4114 * null if none.
4115 *
4116 * @param transformer a function returning the transformation
4117 * for an element, or null of there is no transformation (in
4118 * for an element, or null if there is no transformation (in
4119 * @param reducer a commutative associative combining function
4120 * @return the result of accumulating the given transformation
4121 * of all keys
4122 */
4123 public <U> U reduceKeysInParallel
4124 (Function<? super K, ? extends U> transformer,
4125 BiFunction<? super U, ? super U, ? extends U> reducer) {
4126 return ForkJoinTasks.reduceKeys
4127 (this, transformer, reducer).invoke();
4128 }
4129
4130 /**
4131 * Returns the result of accumulating the given transformation
4132 * of all keys using the given reducer to combine values, and
4133 * the given basis as an identity value.
4134 *
4135 * @param transformer a function returning the transformation
4136 * for an element
4137 * @param basis the identity (initial default value) for the reduction
4138 * @param reducer a commutative associative combining function
4139 * @return the result of accumulating the given transformation
4140 * of all keys
4141 */
4142 public double reduceKeysToDoubleInParallel
4143 (DoubleFunction<? super K> transformer,
4144 double basis,
4145 DoubleBinaryOperator reducer) {
4146 return ForkJoinTasks.reduceKeysToDouble
4147 (this, transformer, basis, reducer).invoke();
4148 }
4149
4150 /**
4151 * Returns the result of accumulating the given transformation
4152 * of all keys using the given reducer to combine values, and
4153 * the given basis as an identity value.
4154 *
4155 * @param transformer a function returning the transformation
4156 * for an element
4157 * @param basis the identity (initial default value) for the reduction
4158 * @param reducer a commutative associative combining function
4159 * @return the result of accumulating the given transformation
4160 * of all keys
4161 */
4162 public long reduceKeysToLongInParallel
4163 (LongFunction<? super K> transformer,
4164 long basis,
4165 LongBinaryOperator reducer) {
4166 return ForkJoinTasks.reduceKeysToLong
4167 (this, transformer, basis, reducer).invoke();
4168 }
4169
4170 /**
4171 * Returns the result of accumulating the given transformation
4172 * of all keys using the given reducer to combine values, and
4173 * the given basis as an identity value.
4174 *
4175 * @param transformer a function returning the transformation
4176 * for an element
4177 * @param basis the identity (initial default value) for the reduction
4178 * @param reducer a commutative associative combining function
4179 * @return the result of accumulating the given transformation
4180 * of all keys
4181 */
4182 public int reduceKeysToIntInParallel
4183 (IntFunction<? super K> transformer,
4184 int basis,
4185 IntBinaryOperator reducer) {
4186 return ForkJoinTasks.reduceKeysToInt
4187 (this, transformer, basis, reducer).invoke();
4188 }
4189
4190 /**
4191 * Performs the given action for each value.
4192 *
4193 * @param action the action
4194 */
4195 public void forEachValueInParallel(Block<? super V> action) {
4196 ForkJoinTasks.forEachValue
4197 (this, action).invoke();
4198 }
4199
4200 /**
4201 * Performs the given action for each non-null transformation
4202 * of each value.
4203 *
4204 * @param transformer a function returning the transformation
4205 * for an element, or null if there is no transformation (in
4206 * which case the action is not applied).
     * @param action the action
4207 */
4208 public <U> void forEachValueInParallel
4209 (Function<? super V, ? extends U> transformer,
4210 Block<? super U> action) {
4211 ForkJoinTasks.forEachValue
4212 (this, transformer, action).invoke();
4213 }
4214
4215 /**
4216 * Returns a non-null result from applying the given search
4217 * function on each value, or null if none. Upon success,
4218 * further element processing is suppressed and the results of
4219 * any other parallel invocations of the search function are
4220 * ignored.
4221 *
4222 * @param searchFunction a function returning a non-null
4223 * result on success, else null
4224 * @return a non-null result from applying the given search
4225 * function on each value, or null if none
4226 *
4227 */
4228 public <U> U searchValuesInParallel
4229 (Function<? super V, ? extends U> searchFunction) {
4230 return ForkJoinTasks.searchValues
4231 (this, searchFunction).invoke();
4232 }
4233
4234 /**
4235 * Returns the result of accumulating all values using the
4236 * given reducer to combine values, or null if none.
4237 *
4238 * @param reducer a commutative associative combining function
4239 * @return the result of accumulating all values
4240 */
4241 public V reduceValuesInParallel
4242 (BiFunction<? super V, ? super V, ? extends V> reducer) {
4243 return ForkJoinTasks.reduceValues
4244 (this, reducer).invoke();
4245 }
4246
4247 /**
4248 * Returns the result of accumulating the given transformation
4249 * of all values using the given reducer to combine values, or
4250 * null if none.
4251 *
4252 * @param transformer a function returning the transformation
4253 * for an element, or null of there is no transformation (in
4254 * for an element, or null if there is no transformation (in
4255 * @param reducer a commutative associative combining function
4256 * @return the result of accumulating the given transformation
4257 * of all values
4258 */
4259 public <U> U reduceValuesInParallel
4260 (Function<? super V, ? extends U> transformer,
4261 BiFunction<? super U, ? super U, ? extends U> reducer) {
4262 return ForkJoinTasks.reduceValues
4263 (this, transformer, reducer).invoke();
4264 }
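    /*
     * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long> "map"):
     * counting mappings by transforming each value to 1 and summing in parallel.
     *
     *   Long count = map.reduceValuesInParallel(
     *       v -> 1L,                // transformer
     *       (a, b) -> a + b);       // reducer
     */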
4265
4266 /**
4267 * Returns the result of accumulating the given transformation
4268 * of all values using the given reducer to combine values,
4269 * and the given basis as an identity value.
4270 *
4271 * @param transformer a function returning the transformation
4272 * for an element
4273 * @param basis the identity (initial default value) for the reduction
4274 * @param reducer a commutative associative combining function
4275 * @return the result of accumulating the given transformation
4276 * of all values
4277 */
4278 public double reduceValuesToDoubleInParallel
4279 (DoubleFunction<? super V> transformer,
4280 double basis,
4281 DoubleBinaryOperator reducer) {
4282 return ForkJoinTasks.reduceValuesToDouble
4283 (this, transformer, basis, reducer).invoke();
4284 }
4285
4286 /**
4287 * Returns the result of accumulating the given transformation
4288 * of all values using the given reducer to combine values,
4289 * and the given basis as an identity value.
4290 *
4291 * @param transformer a function returning the transformation
4292 * for an element
4293 * @param basis the identity (initial default value) for the reduction
4294 * @param reducer a commutative associative combining function
4295 * @return the result of accumulating the given transformation
4296 * of all values
4297 */
4298 public long reduceValuesToLongInParallel
4299 (LongFunction<? super V> transformer,
4300 long basis,
4301 LongBinaryOperator reducer) {
4302 return ForkJoinTasks.reduceValuesToLong
4303 (this, transformer, basis, reducer).invoke();
4304 }
4305
4306 /**
4307 * Returns the result of accumulating the given transformation
4308 * of all values using the given reducer to combine values,
4309 * and the given basis as an identity value.
4310 *
4311 * @param transformer a function returning the transformation
4312 * for an element
4313 * @param basis the identity (initial default value) for the reduction
4314 * @param reducer a commutative associative combining function
4315 * @return the result of accumulating the given transformation
4316 * of all values
4317 */
4318 public int reduceValuesToIntInParallel
4319 (IntFunction<? super V> transformer,
4320 int basis,
4321 IntBinaryOperator reducer) {
4322 return ForkJoinTasks.reduceValuesToInt
4323 (this, transformer, basis, reducer).invoke();
4324 }
4325
4326 /**
4327 * Performs the given action for each entry.
4328 *
4329 * @param action the action
4330 */
4331 public void forEachEntryInParallel(Block<? super Map.Entry<K,V>> action) {
4332 ForkJoinTasks.forEachEntry
4333 (this, action).invoke();
4334 }
4335
4336 /**
4337 * Performs the given action for each non-null transformation
4338 * of each entry.
4339 *
4340 * @param transformer a function returning the transformation
4341 * for an element, or null of there is no transformation (in
4342 * for an element, or null if there is no transformation (in
4343 * @param action the action
4344 */
4345 public <U> void forEachEntryInParallel
4346 (Function<Map.Entry<K,V>, ? extends U> transformer,
4347 Block<? super U> action) {
4348 ForkJoinTasks.forEachEntry
4349 (this, transformer, action).invoke();
4350 }
4351
4352 /**
4353 * Returns a non-null result from applying the given search
4354 * function on each entry, or null if none. Upon success,
4355 * further element processing is suppressed and the results of
4356 * any other parallel invocations of the search function are
4357 * ignored.
4358 *
4359 * @param searchFunction a function returning a non-null
4360 * result on success, else null
4361 * @return a non-null result from applying the given search
4362 * function on each entry, or null if none
4363 */
4364 public <U> U searchEntriesInParallel
4365 (Function<Map.Entry<K,V>, ? extends U> searchFunction) {
4366 return ForkJoinTasks.searchEntries
4367 (this, searchFunction).invoke();
4368 }
4369
4370 /**
4371 * Returns the result of accumulating all entries using the
4372 * given reducer to combine values, or null if none.
4373 *
4374 * @param reducer a commutative associative combining function
4375 * @return the result of accumulating all entries
4376 */
4377 public Map.Entry<K,V> reduceEntriesInParallel
4378 (BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
4379 return ForkJoinTasks.reduceEntries
4380 (this, reducer).invoke();
4381 }
4382
4383 /**
4384 * Returns the result of accumulating the given transformation
4385 * of all entries using the given reducer to combine values,
4386 * or null if none.
4387 *
4388 * @param transformer a function returning the transformation
4389 * for an element, or null of there is no transformation (in
4390 * for an element, or null if there is no transformation (in
4391 * @param reducer a commutative associative combining function
4392 * @return the result of accumulating the given transformation
4393 * of all entries
4394 */
4395 public <U> U reduceEntriesInParallel
4396 (Function<Map.Entry<K,V>, ? extends U> transformer,
4397 BiFunction<? super U, ? super U, ? extends U> reducer) {
4398 return ForkJoinTasks.reduceEntries
4399 (this, transformer, reducer).invoke();
4400 }
4401
4402 /**
4403 * Returns the result of accumulating the given transformation
4404 * of all entries using the given reducer to combine values,
4405 * and the given basis as an identity value.
4406 *
4407 * @param transformer a function returning the transformation
4408 * for an element
4409 * @param basis the identity (initial default value) for the reduction
4410 * @param reducer a commutative associative combining function
4411 * @return the result of accumulating the given transformation
4412 * of all entries
4413 */
4414 public double reduceEntriesToDoubleInParallel
4415 (DoubleFunction<Map.Entry<K,V>> transformer,
4416 double basis,
4417 DoubleBinaryOperator reducer) {
4418 return ForkJoinTasks.reduceEntriesToDouble
4419 (this, transformer, basis, reducer).invoke();
4420 }
4421
4422 /**
4423 * Returns the result of accumulating the given transformation
4424 * of all entries using the given reducer to combine values,
4425 * and the given basis as an identity value.
4426 *
4427 * @param transformer a function returning the transformation
4428 * for an element
4429 * @param basis the identity (initial default value) for the reduction
4430 * @param reducer a commutative associative combining function
4431 * @return the result of accumulating the given transformation
4432 * of all entries
4433 */
4434 public long reduceEntriesToLongInParallel
4435 (LongFunction<Map.Entry<K,V>> transformer,
4436 long basis,
4437 LongBinaryOperator reducer) {
4438 return ForkJoinTasks.reduceEntriesToLong
4439 (this, transformer, basis, reducer).invoke();
4440 }
4441
4442 /**
4443 * Returns the result of accumulating the given transformation
4444 * of all entries using the given reducer to combine values,
4445 * and the given basis as an identity value.
4446 *
4447 * @param transformer a function returning the transformation
4448 * for an element
4449 * @param basis the identity (initial default value) for the reduction
4450 * @param reducer a commutative associative combining function
4451 * @return the result of accumulating the given transformation
4452 * of all entries
4453 */
4454 public int reduceEntriesToIntInParallel
4455 (IntFunction<Map.Entry<K,V>> transformer,
4456 int basis,
4457 IntBinaryOperator reducer) {
4458 return ForkJoinTasks.reduceEntriesToInt
4459 (this, transformer, basis, reducer).invoke();
4460 }
4461
4462
4463 /* ----------------Views -------------- */
4464
4465 /**
4466 * Base class for views.
4467 */
4468 abstract static class CHMView<K, V> {
4469 final ConcurrentHashMap<K, V> map;
4470 CHMView(ConcurrentHashMap<K, V> map) { this.map = map; }
4471
4472 /**
4473 * Returns the map backing this view.
4474 *
4475 * @return the map backing this view
4476 */
4477 public ConcurrentHashMap<K,V> getMap() { return map; }
4478
4479 public final int size() { return map.size(); }
4480 public final boolean isEmpty() { return map.isEmpty(); }
4481 public final void clear() { map.clear(); }
4482
4483 // implementations below rely on concrete classes supplying these
4484 public abstract Iterator<?> iterator();
4485 public abstract boolean contains(Object o);
4486 public abstract boolean remove(Object o);
4487
4488 private static final String oomeMsg = "Required array size too large";
4489
4490 public final Object[] toArray() {
4491 long sz = map.mappingCount();
4492 if (sz > (long)(MAX_ARRAY_SIZE))
4493 throw new OutOfMemoryError(oomeMsg);
4494 int n = (int)sz;
4495 Object[] r = new Object[n];
4496 int i = 0;
4497 Iterator<?> it = iterator();
4498 while (it.hasNext()) {
4499 if (i == n) {
4500 if (n >= MAX_ARRAY_SIZE)
4501 throw new OutOfMemoryError(oomeMsg);
4502 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4503 n = MAX_ARRAY_SIZE;
4504 else
4505 n += (n >>> 1) + 1;
4506 r = Arrays.copyOf(r, n);
4507 }
4508 r[i++] = it.next();
4509 }
4510 return (i == n) ? r : Arrays.copyOf(r, i);
4511 }
4512
4513 @SuppressWarnings("unchecked") public final <T> T[] toArray(T[] a) {
4514 long sz = map.mappingCount();
4515 if (sz > (long)(MAX_ARRAY_SIZE))
4516 throw new OutOfMemoryError(oomeMsg);
4517 int m = (int)sz;
4518 T[] r = (a.length >= m) ? a :
4519 (T[])java.lang.reflect.Array
4520 .newInstance(a.getClass().getComponentType(), m);
4521 int n = r.length;
4522 int i = 0;
4523 Iterator<?> it = iterator();
4524 while (it.hasNext()) {
4525 if (i == n) {
4526 if (n >= MAX_ARRAY_SIZE)
4527 throw new OutOfMemoryError(oomeMsg);
4528 if (n >= MAX_ARRAY_SIZE - (MAX_ARRAY_SIZE >>> 1) - 1)
4529 n = MAX_ARRAY_SIZE;
4530 else
4531 n += (n >>> 1) + 1;
4532 r = Arrays.copyOf(r, n);
4533 }
4534 r[i++] = (T)it.next();
4535 }
4536 if (a == r && i < n) {
4537 r[i] = null; // null-terminate
4538 return r;
4539 }
4540 return (i == n) ? r : Arrays.copyOf(r, i);
4541 }
4542
4543 public final int hashCode() {
4544 int h = 0;
4545 for (Iterator<?> it = iterator(); it.hasNext();)
4546 h += it.next().hashCode();
4547 return h;
4548 }
4549
4550 public final String toString() {
4551 StringBuilder sb = new StringBuilder();
4552 sb.append('[');
4553 Iterator<?> it = iterator();
4554 if (it.hasNext()) {
4555 for (;;) {
4556 Object e = it.next();
4557 sb.append(e == this ? "(this Collection)" : e);
4558 if (!it.hasNext())
4559 break;
4560 sb.append(',').append(' ');
4561 }
4562 }
4563 return sb.append(']').toString();
4564 }
4565
4566 public final boolean containsAll(Collection<?> c) {
4567 if (c != this) {
4568 for (Iterator<?> it = c.iterator(); it.hasNext();) {
4569 Object e = it.next();
4570 if (e == null || !contains(e))
4571 return false;
4572 }
4573 }
4574 return true;
4575 }
4576
4577 public final boolean removeAll(Collection<?> c) {
4578 boolean modified = false;
4579 for (Iterator<?> it = iterator(); it.hasNext();) {
4580 if (c.contains(it.next())) {
4581 it.remove();
4582 modified = true;
4583 }
4584 }
4585 return modified;
4586 }
4587
4588 public final boolean retainAll(Collection<?> c) {
4589 boolean modified = false;
4590 for (Iterator<?> it = iterator(); it.hasNext();) {
4591 if (!c.contains(it.next())) {
4592 it.remove();
4593 modified = true;
4594 }
4595 }
4596 return modified;
4597 }
4598
4599 }
4600
4601 /**
4602 * A view of a ConcurrentHashMap as a {@link Set} of keys, in
4603 * which additions may optionally be enabled by mapping to a
4604 * common value. This class cannot be directly instantiated. See
4605 * {@link #keySet}, {@link #keySet(Object)}, {@link #newKeySet()},
4606 * {@link #newKeySet(int)}.
4607 */
4608 public static class KeySetView<K,V> extends CHMView<K,V>
4609 implements Set<K>, java.io.Serializable {
4610 private static final long serialVersionUID = 7249069246763182397L;
4611 private final V value;
4612 KeySetView(ConcurrentHashMap<K, V> map, V value) { // non-public
4613 super(map);
4614 this.value = value;
4615 }
4616
4617 /**
4618 * Returns the default mapped value for additions,
4619 * or {@code null} if additions are not supported.
4620 *
4621 * @return the default mapped value for additions, or {@code null}
4622 * if not supported.
4623 */
4624 public V getMappedValue() { return value; }
4625
4626 // implement Set API
4627
4628 public boolean contains(Object o) { return map.containsKey(o); }
4629 public boolean remove(Object o) { return map.remove(o) != null; }
4630
4631 /**
4632 * Returns a "weakly consistent" iterator that will never
4633 * throw {@link ConcurrentModificationException}, and
4634 * guarantees to traverse elements as they existed upon
4635 * construction of the iterator, and may (but is not
4636 * guaranteed to) reflect any modifications subsequent to
4637 * construction.
4638 *
4639 * @return an iterator over the keys of this map
4640 */
4641 public Iterator<K> iterator() { return new KeyIterator<K,V>(map); }
4642 public boolean add(K e) {
4643 V v;
4644 if ((v = value) == null)
4645 throw new UnsupportedOperationException();
4646 if (e == null)
4647 throw new NullPointerException();
4648 return map.internalPut(e, v, true) == null;
4649 }
4650 public boolean addAll(Collection<? extends K> c) {
4651 boolean added = false;
4652 V v;
4653 if ((v = value) == null)
4654 throw new UnsupportedOperationException();
4655 for (K e : c) {
4656 if (e == null)
4657 throw new NullPointerException();
4658 if (map.internalPut(e, v, true) == null)
4659 added = true;
4660 }
4661 return added;
4662 }
4663 public boolean equals(Object o) {
4664 Set<?> c;
4665 return ((o instanceof Set) &&
4666 ((c = (Set<?>)o) == this ||
4667 (containsAll(c) && c.containsAll(this))));
4668 }
4669
4670 public Stream<K> stream() {
4671 return Streams.stream(() -> new KeyIterator<K,V>(map), 0);
4672 }
4673 public Stream<K> parallelStream() {
4674 return Streams.parallelStream(() -> new KeyIterator<K,V>(map, null),
4675 0);
4676 }
4677 }
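    /*
     * Usage sketch (illustrative): a KeySetView with a non-null mapped value
     * (for example one obtained from newKeySet()) can serve as a concurrent Set;
     * add() maps each new key to that common value.
     *
     *   Set<String> seen = ConcurrentHashMap.newKeySet();
     *   boolean firstTime = seen.add("some-key");   // false if already present
     */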
4678
4679 /**
4680 * A view of a ConcurrentHashMap as a {@link Collection} of
4681 * values, in which additions are disabled. This class cannot be
4682 * directly instantiated. See {@link #values}.
4683 *
4684 * <p>The view's {@code iterator} is a "weakly consistent" iterator
4685 * that will never throw {@link ConcurrentModificationException},
4686 * and guarantees to traverse elements as they existed upon
4687 * construction of the iterator, and may (but is not guaranteed to)
4688 * reflect any modifications subsequent to construction.
4689 */
4690 public static final class ValuesView<K,V> extends CHMView<K,V>
4691 implements Collection<V> {
4692 ValuesView(ConcurrentHashMap<K, V> map) { super(map); }
4693 public final boolean contains(Object o) { return map.containsValue(o); }
4694 public final boolean remove(Object o) {
4695 if (o != null) {
4696 Iterator<V> it = new ValueIterator<K,V>(map);
4697 while (it.hasNext()) {
4698 if (o.equals(it.next())) {
4699 it.remove();
4700 return true;
4701 }
4702 }
4703 }
4704 return false;
4705 }
4706
4707 /**
4708 * Returns a "weakly consistent" iterator that will never
4709 * throw {@link ConcurrentModificationException}, and
4710 * guarantees to traverse elements as they existed upon
4711 * construction of the iterator, and may (but is not
4712 * guaranteed to) reflect any modifications subsequent to
4713 * construction.
4714 *
4715 * @return an iterator over the values of this map
4716 */
4717 public final Iterator<V> iterator() {
4718 return new ValueIterator<K,V>(map);
4719 }
4720 public final boolean add(V e) {
4721 throw new UnsupportedOperationException();
4722 }
4723 public final boolean addAll(Collection<? extends V> c) {
4724 throw new UnsupportedOperationException();
4725 }
4726
4727 public Stream<V> stream() {
4728 return Streams.stream(() -> new ValueIterator<K,V>(map), 0);
4729 }
4730
4731 public Stream<V> parallelStream() {
4732 return Streams.parallelStream(() -> new ValueIterator<K,V>(map, null),
4733 0);
4734 }
4735
4736 }
4737
4738 /**
4739 * A view of a ConcurrentHashMap as a {@link Set} of (key, value)
4740 * entries. This class cannot be directly instantiated. See
4741 * {@link #entrySet}.
4742 */
4743 public static final class EntrySetView<K,V> extends CHMView<K,V>
4744 implements Set<Map.Entry<K,V>> {
4745 EntrySetView(ConcurrentHashMap<K, V> map) { super(map); }
4746 public final boolean contains(Object o) {
4747 Object k, v, r; Map.Entry<?,?> e;
4748 return ((o instanceof Map.Entry) &&
4749 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4750 (r = map.get(k)) != null &&
4751 (v = e.getValue()) != null &&
4752 (v == r || v.equals(r)));
4753 }
4754 public final boolean remove(Object o) {
4755 Object k, v; Map.Entry<?,?> e;
4756 return ((o instanceof Map.Entry) &&
4757 (k = (e = (Map.Entry<?,?>)o).getKey()) != null &&
4758 (v = e.getValue()) != null &&
4759 map.remove(k, v));
4760 }
4761
4762 /**
4763 * Returns a "weakly consistent" iterator that will never
4764 * throw {@link ConcurrentModificationException}, and
4765 * guarantees to traverse elements as they existed upon
4766 * construction of the iterator, and may (but is not
4767 * guaranteed to) reflect any modifications subsequent to
4768 * construction.
4769 *
4770 * @return an iterator over the entries of this map
4771 */
4772 public final Iterator<Map.Entry<K,V>> iterator() {
4773 return new EntryIterator<K,V>(map);
4774 }
4775
4776 public final boolean add(Entry<K,V> e) {
4777 K key = e.getKey();
4778 V value = e.getValue();
4779 if (key == null || value == null)
4780 throw new NullPointerException();
4781 return map.internalPut(key, value, false) == null;
4782 }
4783 public final boolean addAll(Collection<? extends Entry<K,V>> c) {
4784 boolean added = false;
4785 for (Entry<K,V> e : c) {
4786 if (add(e))
4787 added = true;
4788 }
4789 return added;
4790 }
4791 public boolean equals(Object o) {
4792 Set<?> c;
4793 return ((o instanceof Set) &&
4794 ((c = (Set<?>)o) == this ||
4795 (containsAll(c) && c.containsAll(this))));
4796 }
4797
4798 public Stream<Map.Entry<K,V>> stream() {
4799 return Streams.stream(() -> new EntryIterator<K,V>(map), 0);
4800 }
4801
4802 public Stream<Map.Entry<K,V>> parallelStream() {
4803 return Streams.parallelStream(() -> new EntryIterator<K,V>(map, null),
4804 0);
4805 }
4806 }
4807
4808 // ---------------------------------------------------------------------
4809
4810 /**
4811 * Predefined tasks for performing bulk parallel operations on
4812 * ConcurrentHashMaps. These tasks follow the forms and rules used
4813 * for bulk operations. Each method has the same name, but returns
4814 * a task rather than invoking it. These methods may be useful in
4815 * custom applications such as submitting a task without waiting
4816 * for completion, using a custom pool, or combining with other
4817 * tasks.
4818 */
4819 public static class ForkJoinTasks {
4820 private ForkJoinTasks() {}
4821
4822 /**
4823 * Returns a task that when invoked, performs the given
4824 * action for each (key, value)
4825 *
4826 * @param map the map
4827 * @param action the action
4828 * @return the task
4829 */
4830 public static <K,V> ForkJoinTask<Void> forEach
4831 (ConcurrentHashMap<K,V> map,
4832 BiBlock<? super K, ? super V> action) {
4833 if (action == null) throw new NullPointerException();
4834 return new ForEachMappingTask<K,V>(map, null, -1, action);
4835 }
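        /*
         * Usage sketch (illustrative; assumes a ConcurrentHashMap<String,Long>
         * "map" and a caller-supplied ForkJoinPool "pool"): the factory methods
         * in this class return unsubmitted tasks, so they can be run in a custom
         * pool or forked alongside other work instead of being invoked directly.
         *
         *   ForkJoinTask<Void> task = ConcurrentHashMap.ForkJoinTasks.forEach(
         *       map, (k, v) -> System.out.println(k + " -> " + v));
         *   pool.invoke(task);
         */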
4836
4837 /**
4838 * Returns a task that when invoked, performs the given
4839 * action for each non-null transformation of each (key, value)
4840 *
4841 * @param map the map
4842 * @param transformer a function returning the transformation
4843 * for an element, or null if there is no transformation (in
4844 * which case the action is not applied)
4845 * @param action the action
4846 * @return the task
4847 */
4848 public static <K,V,U> ForkJoinTask<Void> forEach
4849 (ConcurrentHashMap<K,V> map,
4850 BiFunction<? super K, ? super V, ? extends U> transformer,
4851 Block<? super U> action) {
4852 if (transformer == null || action == null)
4853 throw new NullPointerException();
4854 return new ForEachTransformedMappingTask<K,V,U>
4855 (map, null, -1, transformer, action);
4856 }
4857
4858 /**
4859 * Returns a task that when invoked, returns a non-null result
4860 * from applying the given search function on each (key,
4861 * value), or null if none. Upon success, further element
4862 * processing is suppressed and the results of any other
4863 * parallel invocations of the search function are ignored.
4864 *
4865 * @param map the map
4866 * @param searchFunction a function returning a non-null
4867 * result on success, else null
4868 * @return the task
4869 */
4870 public static <K,V,U> ForkJoinTask<U> search
4871 (ConcurrentHashMap<K,V> map,
4872 BiFunction<? super K, ? super V, ? extends U> searchFunction) {
4873 if (searchFunction == null) throw new NullPointerException();
4874 return new SearchMappingsTask<K,V,U>
4875 (map, null, -1, searchFunction,
4876 new AtomicReference<U>());
4877 }
4878
4879 /**
4880 * Returns a task that when invoked, returns the result of
4881 * accumulating the given transformation of all (key, value) pairs
4882 * using the given reducer to combine values, or null if none.
4883 *
4884 * @param map the map
4885 * @param transformer a function returning the transformation
4886 * for an element, or null if there is no transformation (in
4887 * which case it is not combined).
4888 * @param reducer a commutative associative combining function
4889 * @return the task
4890 */
4891 public static <K,V,U> ForkJoinTask<U> reduce
4892 (ConcurrentHashMap<K,V> map,
4893 BiFunction<? super K, ? super V, ? extends U> transformer,
4894 BiFunction<? super U, ? super U, ? extends U> reducer) {
4895 if (transformer == null || reducer == null)
4896 throw new NullPointerException();
4897 return new MapReduceMappingsTask<K,V,U>
4898 (map, null, -1, null, transformer, reducer);
4899 }
4900
4901 /**
4902 * Returns a task that when invoked, returns the result of
4903 * accumulating the given transformation of all (key, value) pairs
4904 * using the given reducer to combine values, and the given
4905 * basis as an identity value.
4906 *
4907 * @param map the map
4908 * @param transformer a function returning the transformation
4909 * for an element
4910 * @param basis the identity (initial default value) for the reduction
4911 * @param reducer a commutative associative combining function
4912 * @return the task
4913 */
4914 public static <K,V> ForkJoinTask<Double> reduceToDouble
4915 (ConcurrentHashMap<K,V> map,
4916 DoubleBiFunction<? super K, ? super V> transformer,
4917 double basis,
4918 DoubleBinaryOperator reducer) {
4919 if (transformer == null || reducer == null)
4920 throw new NullPointerException();
4921 return new MapReduceMappingsToDoubleTask<K,V>
4922 (map, null, -1, null, transformer, basis, reducer);
4923 }
4924
4925 /**
4926 * Returns a task that when invoked, returns the result of
4927 * accumulating the given transformation of all (key, value) pairs
4928 * using the given reducer to combine values, and the given
4929 * basis as an identity value.
4930 *
4931 * @param map the map
4932 * @param transformer a function returning the transformation
4933 * for an element
4934 * @param basis the identity (initial default value) for the reduction
4935 * @param reducer a commutative associative combining function
4936 * @return the task
4937 */
4938 public static <K,V> ForkJoinTask<Long> reduceToLong
4939 (ConcurrentHashMap<K,V> map,
4940 LongBiFunction<? super K, ? super V> transformer,
4941 long basis,
4942 LongBinaryOperator reducer) {
4943 if (transformer == null || reducer == null)
4944 throw new NullPointerException();
4945 return new MapReduceMappingsToLongTask<K,V>
4946 (map, null, -1, null, transformer, basis, reducer);
4947 }
4948
4949 /**
4950 * Returns a task that when invoked, returns the result of
4951 * accumulating the given transformation of all (key, value) pairs
4952 * using the given reducer to combine values, and the given
4953 * basis as an identity value.
4954 *
              * @param map the map
4955          * @param transformer a function returning the transformation
4956 * for an element
4957 * @param basis the identity (initial default value) for the reduction
4958 * @param reducer a commutative associative combining function
4959 * @return the task
4960 */
4961 public static <K,V> ForkJoinTask<Integer> reduceToInt
4962 (ConcurrentHashMap<K,V> map,
4963 IntBiFunction<? super K, ? super V> transformer,
4964 int basis,
4965 IntBinaryOperator reducer) {
4966 if (transformer == null || reducer == null)
4967 throw new NullPointerException();
4968 return new MapReduceMappingsToIntTask<K,V>
4969 (map, null, -1, null, transformer, basis, reducer);
4970 }
4971
4972 /**
4973 * Returns a task that when invoked, performs the given action
4974 * for each key.
4975 *
4976 * @param map the map
4977 * @param action the action
4978 * @return the task
4979 */
4980 public static <K,V> ForkJoinTask<Void> forEachKey
4981 (ConcurrentHashMap<K,V> map,
4982 Block<? super K> action) {
4983 if (action == null) throw new NullPointerException();
4984 return new ForEachKeyTask<K,V>(map, null, -1, action);
4985 }
4986
4987 /**
4988 * Returns a task that when invoked, performs the given action
4989 * for each non-null transformation of each key.
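              *
              * <p>A minimal sketch (illustrative names; common-pool execution
              * assumed) that prints each key longer than three characters:
              *
              * <pre> {@code
              * ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
              * ForkJoinPool.commonPool().invoke(ForkJoinTasks.forEachKey(
              *   map, k -> k.length() > 3 ? k : null, s -> System.out.println(s)));
              * }</pre>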
4990 *
4991 * @param map the map
4992 * @param transformer a function returning the transformation
4993 * for an element, or null if there is no transformation (in
4994 * which case the action is not applied)
4995 * @param action the action
4996 * @return the task
4997 */
4998 public static <K,V,U> ForkJoinTask<Void> forEachKey
4999 (ConcurrentHashMap<K,V> map,
5000 Function<? super K, ? extends U> transformer,
5001 Block<? super U> action) {
5002 if (transformer == null || action == null)
5003 throw new NullPointerException();
5004 return new ForEachTransformedKeyTask<K,V,U>
5005 (map, null, -1, transformer, action);
5006 }
5007
5008 /**
5009 * Returns a task that when invoked, returns a non-null result
5010 * from applying the given search function on each key, or
5011 * null if none. Upon success, further element processing is
5012 * suppressed and the results of any other parallel
5013 * invocations of the search function are ignored.
5014 *
5015 * @param map the map
5016 * @param searchFunction a function returning a non-null
5017 * result on success, else null
5018 * @return the task
5019 */
5020 public static <K,V,U> ForkJoinTask<U> searchKeys
5021 (ConcurrentHashMap<K,V> map,
5022 Function<? super K, ? extends U> searchFunction) {
5023 if (searchFunction == null) throw new NullPointerException();
5024 return new SearchKeysTask<K,V,U>
5025 (map, null, -1, searchFunction,
5026 new AtomicReference<U>());
5027 }
5028
5029 /**
5030 * Returns a task that when invoked, returns the result of
5031 * accumulating all keys using the given reducer to combine
5032 * values, or null if none.
5033 *
5034 * @param map the map
5035 * @param reducer a commutative associative combining function
5036 * @return the task
5037 */
5038 public static <K,V> ForkJoinTask<K> reduceKeys
5039 (ConcurrentHashMap<K,V> map,
5040 BiFunction<? super K, ? super K, ? extends K> reducer) {
5041 if (reducer == null) throw new NullPointerException();
5042 return new ReduceKeysTask<K,V>
5043 (map, null, -1, null, reducer);
5044 }
5045
5046 /**
5047 * Returns a task that when invoked, returns the result of
5048 * accumulating the given transformation of all keys using the given
5049 * reducer to combine values, or null if none.
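              *
              * <p>A sketch of one possible use (illustrative names; common-pool
              * execution assumed), computing the longest key length:
              *
              * <pre> {@code
              * ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
              * ForkJoinTask<Integer> task = ForkJoinTasks.reduceKeys(
              *   map, k -> k.length(), (a, b) -> a >= b ? a : b);
              * Integer longest = ForkJoinPool.commonPool().invoke(task); // null if empty
              * }</pre>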
5050 *
5051 * @param map the map
5052 * @param transformer a function returning the transformation
5053 * for an element, or null if there is no transformation (in
5054 * which case it is not combined).
5055 * @param reducer a commutative associative combining function
5056 * @return the task
5057 */
5058 public static <K,V,U> ForkJoinTask<U> reduceKeys
5059 (ConcurrentHashMap<K,V> map,
5060 Function<? super K, ? extends U> transformer,
5061 BiFunction<? super U, ? super U, ? extends U> reducer) {
5062 if (transformer == null || reducer == null)
5063 throw new NullPointerException();
5064 return new MapReduceKeysTask<K,V,U>
5065 (map, null, -1, null, transformer, reducer);
5066 }
5067
5068 /**
5069 * Returns a task that when invoked, returns the result of
5070 * accumulating the given transformation of all keys using the given
5071 * reducer to combine values, and the given basis as an
5072 * identity value.
5073 *
5074 * @param map the map
5075 * @param transformer a function returning the transformation
5076 * for an element
5077 * @param basis the identity (initial default value) for the reduction
5078 * @param reducer a commutative associative combining function
5079 * @return the task
5080 */
5081 public static <K,V> ForkJoinTask<Double> reduceKeysToDouble
5082 (ConcurrentHashMap<K,V> map,
5083 DoubleFunction<? super K> transformer,
5084 double basis,
5085 DoubleBinaryOperator reducer) {
5086 if (transformer == null || reducer == null)
5087 throw new NullPointerException();
5088 return new MapReduceKeysToDoubleTask<K,V>
5089 (map, null, -1, null, transformer, basis, reducer);
5090 }
5091
5092 /**
5093 * Returns a task that when invoked, returns the result of
5094 * accumulating the given transformation of all keys using the given
5095 * reducer to combine values, and the given basis as an
5096 * identity value.
5097 *
5098 * @param map the map
5099 * @param transformer a function returning the transformation
5100 * for an element
5101 * @param basis the identity (initial default value) for the reduction
5102 * @param reducer a commutative associative combining function
5103 * @return the task
5104 */
5105 public static <K,V> ForkJoinTask<Long> reduceKeysToLong
5106 (ConcurrentHashMap<K,V> map,
5107 LongFunction<? super K> transformer,
5108 long basis,
5109 LongBinaryOperator reducer) {
5110 if (transformer == null || reducer == null)
5111 throw new NullPointerException();
5112 return new MapReduceKeysToLongTask<K,V>
5113 (map, null, -1, null, transformer, basis, reducer);
5114 }
5115
5116 /**
5117 * Returns a task that when invoked, returns the result of
5118 * accumulating the given transformation of all keys using the given
5119 * reducer to combine values, and the given basis as an
5120 * identity value.
5121 *
5122 * @param map the map
5123 * @param transformer a function returning the transformation
5124 * for an element
5125 * @param basis the identity (initial default value) for the reduction
5126 * @param reducer a commutative associative combining function
5127 * @return the task
5128 */
5129 public static <K,V> ForkJoinTask<Integer> reduceKeysToInt
5130 (ConcurrentHashMap<K,V> map,
5131 IntFunction<? super K> transformer,
5132 int basis,
5133 IntBinaryOperator reducer) {
5134 if (transformer == null || reducer == null)
5135 throw new NullPointerException();
5136 return new MapReduceKeysToIntTask<K,V>
5137 (map, null, -1, null, transformer, basis, reducer);
5138 }
5139
5140 /**
5141 * Returns a task that when invoked, performs the given action
5142 * for each value.
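              *
              * <p>A minimal sketch (illustrative names; common-pool execution
              * assumed):
              *
              * <pre> {@code
              * ConcurrentHashMap<String,String> map = new ConcurrentHashMap<>();
              * ForkJoinPool.commonPool().invoke(
              *   ForkJoinTasks.forEachValue(map, v -> System.out.println(v)));
              * }</pre>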
5143 *
5144 * @param map the map
5145 * @param action the action
              * @return the task
5146          */
5147 public static <K,V> ForkJoinTask<Void> forEachValue
5148 (ConcurrentHashMap<K,V> map,
5149 Block<? super V> action) {
5150 if (action == null) throw new NullPointerException();
5151 return new ForEachValueTask<K,V>(map, null, -1, action);
5152 }
5153
5154 /**
5155 * Returns a task that when invoked, performs the given action
5156 * for each non-null transformation of each value.
5157 *
5158 * @param map the map
5159 * @param transformer a function returning the transformation
5160 * for an element, or null if there is no transformation (in
5161 * which case the action is not applied)
5162 * @param action the action
              * @return the task
5163          */
5164 public static <K,V,U> ForkJoinTask<Void> forEachValue
5165 (ConcurrentHashMap<K,V> map,
5166 Function<? super V, ? extends U> transformer,
5167 Block<? super U> action) {
5168 if (transformer == null || action == null)
5169 throw new NullPointerException();
5170 return new ForEachTransformedValueTask<K,V,U>
5171 (map, null, -1, transformer, action);
5172 }
5173
5174 /**
5175 * Returns a task that when invoked, returns a non-null result
5176 * from applying the given search function on each value, or
5177 * null if none. Upon success, further element processing is
5178 * suppressed and the results of any other parallel
5179 * invocations of the search function are ignored.
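              *
              * <p>A sketch of one possible use (illustrative names; common-pool
              * execution assumed), locating any value containing "error":
              *
              * <pre> {@code
              * ConcurrentHashMap<Long,String> map = new ConcurrentHashMap<>();
              * ForkJoinTask<String> task = ForkJoinTasks.searchValues(
              *   map, v -> v.contains("error") ? v : null);
              * String found = ForkJoinPool.commonPool().invoke(task); // null if none
              * }</pre>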
5180 *
5181 * @param map the map
5182 * @param searchFunction a function returning a non-null
5183 * result on success, else null
5184 * @return the task
5185 */
5186 public static <K,V,U> ForkJoinTask<U> searchValues
5187 (ConcurrentHashMap<K,V> map,
5188 Function<? super V, ? extends U> searchFunction) {
5189 if (searchFunction == null) throw new NullPointerException();
5190 return new SearchValuesTask<K,V,U>
5191 (map, null, -1, searchFunction,
5192 new AtomicReference<U>());
5193 }
5194
5195 /**
5196 * Returns a task that when invoked, returns the result of
5197 * accumulating all values using the given reducer to combine
5198 * values, or null if none.
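              *
              * <p>For example, this sketch (illustrative names; common-pool
              * execution assumed) sums all values:
              *
              * <pre> {@code
              * ConcurrentHashMap<String,Long> map = new ConcurrentHashMap<>();
              * ForkJoinTask<Long> task = ForkJoinTasks.reduceValues(
              *   map, (a, b) -> a + b);
              * Long sum = ForkJoinPool.commonPool().invoke(task); // null if empty
              * }</pre>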
5199 *
5200 * @param map the map
5201 * @param reducer a commutative associative combining function
5202 * @return the task
5203 */
5204 public static <K,V> ForkJoinTask<V> reduceValues
5205 (ConcurrentHashMap<K,V> map,
5206 BiFunction<? super V, ? super V, ? extends V> reducer) {
5207 if (reducer == null) throw new NullPointerException();
5208 return new ReduceValuesTask<K,V>
5209 (map, null, -1, null, reducer);
5210 }
5211
5212 /**
5213 * Returns a task that when invoked, returns the result of
5214 * accumulating the given transformation of all values using the
5215 * given reducer to combine values, or null if none.
5216 *
5217 * @param map the map
5218 * @param transformer a function returning the transformation
5219 * for an element, or null if there is no transformation (in
5220 * which case it is not combined).
5221 * @param reducer a commutative associative combining function
5222 * @return the task
5223 */
5224 public static <K,V,U> ForkJoinTask<U> reduceValues
5225 (ConcurrentHashMap<K,V> map,
5226 Function<? super V, ? extends U> transformer,
5227 BiFunction<? super U, ? super U, ? extends U> reducer) {
5228 if (transformer == null || reducer == null)
5229 throw new NullPointerException();
5230 return new MapReduceValuesTask<K,V,U>
5231 (map, null, -1, null, transformer, reducer);
5232 }
5233
5234 /**
5235 * Returns a task that when invoked, returns the result of
5236 * accumulating the given transformation of all values using the
5237 * given reducer to combine values, and the given basis as an
5238 * identity value.
5239 *
5240 * @param map the map
5241 * @param transformer a function returning the transformation
5242 * for an element
5243 * @param basis the identity (initial default value) for the reduction
5244 * @param reducer a commutative associative combining function
5245 * @return the task
5246 */
5247 public static <K,V> ForkJoinTask<Double> reduceValuesToDouble
5248 (ConcurrentHashMap<K,V> map,
5249 DoubleFunction<? super V> transformer,
5250 double basis,
5251 DoubleBinaryOperator reducer) {
5252 if (transformer == null || reducer == null)
5253 throw new NullPointerException();
5254 return new MapReduceValuesToDoubleTask<K,V>
5255 (map, null, -1, null, transformer, basis, reducer);
5256 }
5257
5258 /**
5259 * Returns a task that when invoked, returns the result of
5260 * accumulating the given transformation of all values using the
5261 * given reducer to combine values, and the given basis as an
5262 * identity value.
5263 *
5264 * @param map the map
5265 * @param transformer a function returning the transformation
5266 * for an element
5267 * @param basis the identity (initial default value) for the reduction
5268 * @param reducer a commutative associative combining function
5269 * @return the task
5270 */
5271 public static <K,V> ForkJoinTask<Long> reduceValuesToLong
5272 (ConcurrentHashMap<K,V> map,
5273 LongFunction<? super V> transformer,
5274 long basis,
5275 LongBinaryOperator reducer) {
5276 if (transformer == null || reducer == null)
5277 throw new NullPointerException();
5278 return new MapReduceValuesToLongTask<K,V>
5279 (map, null, -1, null, transformer, basis, reducer);
5280 }
5281
5282 /**
5283 * Returns a task that when invoked, returns the result of
5284 * accumulating the given transformation of all values using the
5285 * given reducer to combine values, and the given basis as an
5286 * identity value.
5287 *
5288 * @param map the map
5289 * @param transformer a function returning the transformation
5290 * for an element
5291 * @param basis the identity (initial default value) for the reduction
5292 * @param reducer a commutative associative combining function
5293 * @return the task
5294 */
5295 public static <K,V> ForkJoinTask<Integer> reduceValuesToInt
5296 (ConcurrentHashMap<K,V> map,
5297 IntFunction<? super V> transformer,
5298 int basis,
5299 IntBinaryOperator reducer) {
5300 if (transformer == null || reducer == null)
5301 throw new NullPointerException();
5302 return new MapReduceValuesToIntTask<K,V>
5303 (map, null, -1, null, transformer, basis, reducer);
5304 }
5305
5306 /**
5307          * Returns a task that when invoked, performs the given action
5308 * for each entry.
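              *
              * <p>A minimal sketch (illustrative names; common-pool execution
              * assumed):
              *
              * <pre> {@code
              * ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
              * ForkJoinPool.commonPool().invoke(ForkJoinTasks.forEachEntry(
              *   map, e -> System.out.println(e.getKey() + "=" + e.getValue())));
              * }</pre>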
5309 *
5310 * @param map the map
5311 * @param action the action
              * @return the task
5312          */
5313 public static <K,V> ForkJoinTask<Void> forEachEntry
5314 (ConcurrentHashMap<K,V> map,
5315 Block<? super Map.Entry<K,V>> action) {
5316 if (action == null) throw new NullPointerException();
5317 return new ForEachEntryTask<K,V>(map, null, -1, action);
5318 }
5319
5320 /**
5321          * Returns a task that when invoked, performs the given action
5322 * for each non-null transformation of each entry.
5323 *
5324 * @param map the map
5325 * @param transformer a function returning the transformation
5326 * for an element, or null if there is no transformation (in
5327 * which case the action is not applied)
5328 * @param action the action
              * @return the task
5329          */
5330 public static <K,V,U> ForkJoinTask<Void> forEachEntry
5331 (ConcurrentHashMap<K,V> map,
5332 Function<Map.Entry<K,V>, ? extends U> transformer,
5333 Block<? super U> action) {
5334 if (transformer == null || action == null)
5335 throw new NullPointerException();
5336 return new ForEachTransformedEntryTask<K,V,U>
5337 (map, null, -1, transformer, action);
5338 }
5339
5340 /**
5341 * Returns a task that when invoked, returns a non-null result
5342 * from applying the given search function on each entry, or
5343 * null if none. Upon success, further element processing is
5344 * suppressed and the results of any other parallel
5345 * invocations of the search function are ignored.
5346 *
5347 * @param map the map
5348 * @param searchFunction a function returning a non-null
5349 * result on success, else null
5350 * @return the task
5351 */
5352 public static <K,V,U> ForkJoinTask<U> searchEntries
5353 (ConcurrentHashMap<K,V> map,
5354 Function<Map.Entry<K,V>, ? extends U> searchFunction) {
5355 if (searchFunction == null) throw new NullPointerException();
5356 return new SearchEntriesTask<K,V,U>
5357 (map, null, -1, searchFunction,
5358 new AtomicReference<U>());
5359 }
5360
5361 /**
5362 * Returns a task that when invoked, returns the result of
5363 * accumulating all entries using the given reducer to combine
5364 * values, or null if none.
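              *
              * <p>A sketch of one possible use (illustrative names; common-pool
              * execution assumed), selecting an entry with the largest value:
              *
              * <pre> {@code
              * ConcurrentHashMap<String,Integer> map = new ConcurrentHashMap<>();
              * ForkJoinTask<Map.Entry<String,Integer>> task = ForkJoinTasks.reduceEntries(
              *   map, (e1, e2) -> e1.getValue() >= e2.getValue() ? e1 : e2);
              * Map.Entry<String,Integer> max = ForkJoinPool.commonPool().invoke(task);
              * }</pre>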
5365 *
5366 * @param map the map
5367 * @param reducer a commutative associative combining function
5368 * @return the task
5369 */
5370 public static <K,V> ForkJoinTask<Map.Entry<K,V>> reduceEntries
5371 (ConcurrentHashMap<K,V> map,
5372 BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5373 if (reducer == null) throw new NullPointerException();
5374 return new ReduceEntriesTask<K,V>
5375 (map, null, -1, null, reducer);
5376 }
5377
5378 /**
5379 * Returns a task that when invoked, returns the result of
5380 * accumulating the given transformation of all entries using the
5381 * given reducer to combine values, or null if none.
5382 *
5383 * @param map the map
5384 * @param transformer a function returning the transformation
5385 * for an element, or null if there is no transformation (in
5386 * which case it is not combined).
5387 * @param reducer a commutative associative combining function
5388 * @return the task
5389 */
5390 public static <K,V,U> ForkJoinTask<U> reduceEntries
5391 (ConcurrentHashMap<K,V> map,
5392 Function<Map.Entry<K,V>, ? extends U> transformer,
5393 BiFunction<? super U, ? super U, ? extends U> reducer) {
5394 if (transformer == null || reducer == null)
5395 throw new NullPointerException();
5396 return new MapReduceEntriesTask<K,V,U>
5397 (map, null, -1, null, transformer, reducer);
5398 }
5399
5400 /**
5401 * Returns a task that when invoked, returns the result of
5402 * accumulating the given transformation of all entries using the
5403 * given reducer to combine values, and the given basis as an
5404 * identity value.
5405 *
5406 * @param map the map
5407 * @param transformer a function returning the transformation
5408 * for an element
5409 * @param basis the identity (initial default value) for the reduction
5410 * @param reducer a commutative associative combining function
5411 * @return the task
5412 */
5413 public static <K,V> ForkJoinTask<Double> reduceEntriesToDouble
5414 (ConcurrentHashMap<K,V> map,
5415 DoubleFunction<Map.Entry<K,V>> transformer,
5416 double basis,
5417 DoubleBinaryOperator reducer) {
5418 if (transformer == null || reducer == null)
5419 throw new NullPointerException();
5420 return new MapReduceEntriesToDoubleTask<K,V>
5421 (map, null, -1, null, transformer, basis, reducer);
5422 }
5423
5424 /**
5425 * Returns a task that when invoked, returns the result of
5426 * accumulating the given transformation of all entries using the
5427 * given reducer to combine values, and the given basis as an
5428 * identity value.
5429 *
5430 * @param map the map
5431 * @param transformer a function returning the transformation
5432 * for an element
5433 * @param basis the identity (initial default value) for the reduction
5434 * @param reducer a commutative associative combining function
5435 * @return the task
5436 */
5437 public static <K,V> ForkJoinTask<Long> reduceEntriesToLong
5438 (ConcurrentHashMap<K,V> map,
5439 LongFunction<Map.Entry<K,V>> transformer,
5440 long basis,
5441 LongBinaryOperator reducer) {
5442 if (transformer == null || reducer == null)
5443 throw new NullPointerException();
5444 return new MapReduceEntriesToLongTask<K,V>
5445 (map, null, -1, null, transformer, basis, reducer);
5446 }
5447
5448 /**
5449 * Returns a task that when invoked, returns the result of
5450 * accumulating the given transformation of all entries using the
5451 * given reducer to combine values, and the given basis as an
5452 * identity value.
5453 *
5454 * @param map the map
5455 * @param transformer a function returning the transformation
5456 * for an element
5457 * @param basis the identity (initial default value) for the reduction
5458 * @param reducer a commutative associative combining function
5459 * @return the task
5460 */
5461 public static <K,V> ForkJoinTask<Integer> reduceEntriesToInt
5462 (ConcurrentHashMap<K,V> map,
5463 IntFunction<Map.Entry<K,V>> transformer,
5464 int basis,
5465 IntBinaryOperator reducer) {
5466 if (transformer == null || reducer == null)
5467 throw new NullPointerException();
5468 return new MapReduceEntriesToIntTask<K,V>
5469 (map, null, -1, null, transformer, basis, reducer);
5470 }
5471 }
5472
5473 // -------------------------------------------------------
5474
5475 /*
5476 * Task classes. Coded in a regular but ugly format/style to
5477 * simplify checks that each variant differs in the right way from
5478 * others. The null screenings exist because compilers cannot tell
5479      * that we've already null-checked task arguments, so we force the
5480      * simplest hoisted bypass to help avoid convoluted traps.
5481 */
5482
5483 @SuppressWarnings("serial") static final class ForEachKeyTask<K,V>
5484 extends Traverser<K,V,Void> {
5485 final Block<? super K> action;
5486 ForEachKeyTask
5487 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5488 Block<? super K> action) {
5489 super(m, p, b);
5490 this.action = action;
5491 }
5492 @SuppressWarnings("unchecked") public final void compute() {
5493 final Block<? super K> action;
5494 if ((action = this.action) != null) {
5495 for (int b; (b = preSplit()) > 0;)
5496 new ForEachKeyTask<K,V>(map, this, b, action).fork();
5497 while (advance() != null)
5498 action.accept((K)nextKey);
5499 propagateCompletion();
5500 }
5501 }
5502 }
5503
5504 @SuppressWarnings("serial") static final class ForEachValueTask<K,V>
5505 extends Traverser<K,V,Void> {
5506 final Block<? super V> action;
5507 ForEachValueTask
5508 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5509 Block<? super V> action) {
5510 super(m, p, b);
5511 this.action = action;
5512 }
5513 @SuppressWarnings("unchecked") public final void compute() {
5514 final Block<? super V> action;
5515 if ((action = this.action) != null) {
5516 for (int b; (b = preSplit()) > 0;)
5517 new ForEachValueTask<K,V>(map, this, b, action).fork();
5518 V v;
5519 while ((v = advance()) != null)
5520 action.accept(v);
5521 propagateCompletion();
5522 }
5523 }
5524 }
5525
5526 @SuppressWarnings("serial") static final class ForEachEntryTask<K,V>
5527 extends Traverser<K,V,Void> {
5528 final Block<? super Entry<K,V>> action;
5529 ForEachEntryTask
5530 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5531 Block<? super Entry<K,V>> action) {
5532 super(m, p, b);
5533 this.action = action;
5534 }
5535 @SuppressWarnings("unchecked") public final void compute() {
5536 final Block<? super Entry<K,V>> action;
5537 if ((action = this.action) != null) {
5538 for (int b; (b = preSplit()) > 0;)
5539 new ForEachEntryTask<K,V>(map, this, b, action).fork();
5540 V v;
5541 while ((v = advance()) != null)
5542 action.accept(entryFor((K)nextKey, v));
5543 propagateCompletion();
5544 }
5545 }
5546 }
5547
5548 @SuppressWarnings("serial") static final class ForEachMappingTask<K,V>
5549 extends Traverser<K,V,Void> {
5550 final BiBlock<? super K, ? super V> action;
5551 ForEachMappingTask
5552 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5553 BiBlock<? super K,? super V> action) {
5554 super(m, p, b);
5555 this.action = action;
5556 }
5557 @SuppressWarnings("unchecked") public final void compute() {
5558 final BiBlock<? super K, ? super V> action;
5559 if ((action = this.action) != null) {
5560 for (int b; (b = preSplit()) > 0;)
5561 new ForEachMappingTask<K,V>(map, this, b, action).fork();
5562 V v;
5563 while ((v = advance()) != null)
5564 action.accept((K)nextKey, v);
5565 propagateCompletion();
5566 }
5567 }
5568 }
5569
5570 @SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U>
5571 extends Traverser<K,V,Void> {
5572 final Function<? super K, ? extends U> transformer;
5573 final Block<? super U> action;
5574 ForEachTransformedKeyTask
5575 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5576 Function<? super K, ? extends U> transformer, Block<? super U> action) {
5577 super(m, p, b);
5578 this.transformer = transformer; this.action = action;
5579 }
5580 @SuppressWarnings("unchecked") public final void compute() {
5581 final Function<? super K, ? extends U> transformer;
5582 final Block<? super U> action;
5583 if ((transformer = this.transformer) != null &&
5584 (action = this.action) != null) {
5585 for (int b; (b = preSplit()) > 0;)
5586 new ForEachTransformedKeyTask<K,V,U>
5587 (map, this, b, transformer, action).fork();
5588 U u;
5589 while (advance() != null) {
5590 if ((u = transformer.apply((K)nextKey)) != null)
5591 action.accept(u);
5592 }
5593 propagateCompletion();
5594 }
5595 }
5596 }
5597
5598 @SuppressWarnings("serial") static final class ForEachTransformedValueTask<K,V,U>
5599 extends Traverser<K,V,Void> {
5600 final Function<? super V, ? extends U> transformer;
5601 final Block<? super U> action;
5602 ForEachTransformedValueTask
5603 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5604 Function<? super V, ? extends U> transformer, Block<? super U> action) {
5605 super(m, p, b);
5606 this.transformer = transformer; this.action = action;
5607 }
5608 @SuppressWarnings("unchecked") public final void compute() {
5609 final Function<? super V, ? extends U> transformer;
5610 final Block<? super U> action;
5611 if ((transformer = this.transformer) != null &&
5612 (action = this.action) != null) {
5613 for (int b; (b = preSplit()) > 0;)
5614 new ForEachTransformedValueTask<K,V,U>
5615 (map, this, b, transformer, action).fork();
5616 V v; U u;
5617 while ((v = advance()) != null) {
5618 if ((u = transformer.apply(v)) != null)
5619 action.accept(u);
5620 }
5621 propagateCompletion();
5622 }
5623 }
5624 }
5625
5626 @SuppressWarnings("serial") static final class ForEachTransformedEntryTask<K,V,U>
5627 extends Traverser<K,V,Void> {
5628 final Function<Map.Entry<K,V>, ? extends U> transformer;
5629 final Block<? super U> action;
5630 ForEachTransformedEntryTask
5631 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5632 Function<Map.Entry<K,V>, ? extends U> transformer, Block<? super U> action) {
5633 super(m, p, b);
5634 this.transformer = transformer; this.action = action;
5635 }
5636 @SuppressWarnings("unchecked") public final void compute() {
5637 final Function<Map.Entry<K,V>, ? extends U> transformer;
5638 final Block<? super U> action;
5639 if ((transformer = this.transformer) != null &&
5640 (action = this.action) != null) {
5641 for (int b; (b = preSplit()) > 0;)
5642 new ForEachTransformedEntryTask<K,V,U>
5643 (map, this, b, transformer, action).fork();
5644 V v; U u;
5645 while ((v = advance()) != null) {
5646 if ((u = transformer.apply(entryFor((K)nextKey,
5647 v))) != null)
5648 action.accept(u);
5649 }
5650 propagateCompletion();
5651 }
5652 }
5653 }
5654
5655 @SuppressWarnings("serial") static final class ForEachTransformedMappingTask<K,V,U>
5656 extends Traverser<K,V,Void> {
5657 final BiFunction<? super K, ? super V, ? extends U> transformer;
5658 final Block<? super U> action;
5659 ForEachTransformedMappingTask
5660 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5661 BiFunction<? super K, ? super V, ? extends U> transformer,
5662 Block<? super U> action) {
5663 super(m, p, b);
5664 this.transformer = transformer; this.action = action;
5665 }
5666 @SuppressWarnings("unchecked") public final void compute() {
5667 final BiFunction<? super K, ? super V, ? extends U> transformer;
5668 final Block<? super U> action;
5669 if ((transformer = this.transformer) != null &&
5670 (action = this.action) != null) {
5671 for (int b; (b = preSplit()) > 0;)
5672 new ForEachTransformedMappingTask<K,V,U>
5673 (map, this, b, transformer, action).fork();
5674 V v; U u;
5675 while ((v = advance()) != null) {
5676 if ((u = transformer.apply((K)nextKey, v)) != null)
5677 action.accept(u);
5678 }
5679 propagateCompletion();
5680 }
5681 }
5682 }
5683
5684 @SuppressWarnings("serial") static final class SearchKeysTask<K,V,U>
5685 extends Traverser<K,V,U> {
5686 final Function<? super K, ? extends U> searchFunction;
5687 final AtomicReference<U> result;
5688 SearchKeysTask
5689 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5690 Function<? super K, ? extends U> searchFunction,
5691 AtomicReference<U> result) {
5692 super(m, p, b);
5693 this.searchFunction = searchFunction; this.result = result;
5694 }
5695 public final U getRawResult() { return result.get(); }
5696 @SuppressWarnings("unchecked") public final void compute() {
5697 final Function<? super K, ? extends U> searchFunction;
5698 final AtomicReference<U> result;
5699 if ((searchFunction = this.searchFunction) != null &&
5700 (result = this.result) != null) {
5701 for (int b;;) {
5702 if (result.get() != null)
5703 return;
5704 if ((b = preSplit()) <= 0)
5705 break;
5706 new SearchKeysTask<K,V,U>
5707 (map, this, b, searchFunction, result).fork();
5708 }
5709 while (result.get() == null) {
5710 U u;
5711 if (advance() == null) {
5712 propagateCompletion();
5713 break;
5714 }
5715 if ((u = searchFunction.apply((K)nextKey)) != null) {
5716 if (result.compareAndSet(null, u))
5717 quietlyCompleteRoot();
5718 break;
5719 }
5720 }
5721 }
5722 }
5723 }
5724
5725 @SuppressWarnings("serial") static final class SearchValuesTask<K,V,U>
5726 extends Traverser<K,V,U> {
5727 final Function<? super V, ? extends U> searchFunction;
5728 final AtomicReference<U> result;
5729 SearchValuesTask
5730 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5731 Function<? super V, ? extends U> searchFunction,
5732 AtomicReference<U> result) {
5733 super(m, p, b);
5734 this.searchFunction = searchFunction; this.result = result;
5735 }
5736 public final U getRawResult() { return result.get(); }
5737 @SuppressWarnings("unchecked") public final void compute() {
5738 final Function<? super V, ? extends U> searchFunction;
5739 final AtomicReference<U> result;
5740 if ((searchFunction = this.searchFunction) != null &&
5741 (result = this.result) != null) {
5742 for (int b;;) {
5743 if (result.get() != null)
5744 return;
5745 if ((b = preSplit()) <= 0)
5746 break;
5747 new SearchValuesTask<K,V,U>
5748 (map, this, b, searchFunction, result).fork();
5749 }
5750 while (result.get() == null) {
5751 V v; U u;
5752 if ((v = advance()) == null) {
5753 propagateCompletion();
5754 break;
5755 }
5756 if ((u = searchFunction.apply(v)) != null) {
5757 if (result.compareAndSet(null, u))
5758 quietlyCompleteRoot();
5759 break;
5760 }
5761 }
5762 }
5763 }
5764 }
5765
5766 @SuppressWarnings("serial") static final class SearchEntriesTask<K,V,U>
5767 extends Traverser<K,V,U> {
5768 final Function<Entry<K,V>, ? extends U> searchFunction;
5769 final AtomicReference<U> result;
5770 SearchEntriesTask
5771 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5772 Function<Entry<K,V>, ? extends U> searchFunction,
5773 AtomicReference<U> result) {
5774 super(m, p, b);
5775 this.searchFunction = searchFunction; this.result = result;
5776 }
5777 public final U getRawResult() { return result.get(); }
5778 @SuppressWarnings("unchecked") public final void compute() {
5779 final Function<Entry<K,V>, ? extends U> searchFunction;
5780 final AtomicReference<U> result;
5781 if ((searchFunction = this.searchFunction) != null &&
5782 (result = this.result) != null) {
5783 for (int b;;) {
5784 if (result.get() != null)
5785 return;
5786 if ((b = preSplit()) <= 0)
5787 break;
5788 new SearchEntriesTask<K,V,U>
5789 (map, this, b, searchFunction, result).fork();
5790 }
5791 while (result.get() == null) {
5792 V v; U u;
5793 if ((v = advance()) == null) {
5794 propagateCompletion();
5795 break;
5796 }
5797 if ((u = searchFunction.apply(entryFor((K)nextKey,
5798 v))) != null) {
5799 if (result.compareAndSet(null, u))
5800 quietlyCompleteRoot();
5801 return;
5802 }
5803 }
5804 }
5805 }
5806 }
5807
5808 @SuppressWarnings("serial") static final class SearchMappingsTask<K,V,U>
5809 extends Traverser<K,V,U> {
5810 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
5811 final AtomicReference<U> result;
5812 SearchMappingsTask
5813 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5814 BiFunction<? super K, ? super V, ? extends U> searchFunction,
5815 AtomicReference<U> result) {
5816 super(m, p, b);
5817 this.searchFunction = searchFunction; this.result = result;
5818 }
5819 public final U getRawResult() { return result.get(); }
5820 @SuppressWarnings("unchecked") public final void compute() {
5821 final BiFunction<? super K, ? super V, ? extends U> searchFunction;
5822 final AtomicReference<U> result;
5823 if ((searchFunction = this.searchFunction) != null &&
5824 (result = this.result) != null) {
5825 for (int b;;) {
5826 if (result.get() != null)
5827 return;
5828 if ((b = preSplit()) <= 0)
5829 break;
5830 new SearchMappingsTask<K,V,U>
5831 (map, this, b, searchFunction, result).fork();
5832 }
5833 while (result.get() == null) {
5834 V v; U u;
5835 if ((v = advance()) == null) {
5836 propagateCompletion();
5837 break;
5838 }
5839 if ((u = searchFunction.apply((K)nextKey, v)) != null) {
5840 if (result.compareAndSet(null, u))
5841 quietlyCompleteRoot();
5842 break;
5843 }
5844 }
5845 }
5846 }
5847 }
5848
5849 @SuppressWarnings("serial") static final class ReduceKeysTask<K,V>
5850 extends Traverser<K,V,K> {
5851 final BiFunction<? super K, ? super K, ? extends K> reducer;
5852 K result;
5853 ReduceKeysTask<K,V> rights, nextRight;
5854 ReduceKeysTask
5855 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5856 ReduceKeysTask<K,V> nextRight,
5857 BiFunction<? super K, ? super K, ? extends K> reducer) {
5858 super(m, p, b); this.nextRight = nextRight;
5859 this.reducer = reducer;
5860 }
5861 public final K getRawResult() { return result; }
5862 @SuppressWarnings("unchecked") public final void compute() {
5863 final BiFunction<? super K, ? super K, ? extends K> reducer;
5864 if ((reducer = this.reducer) != null) {
5865 for (int b; (b = preSplit()) > 0;)
5866 (rights = new ReduceKeysTask<K,V>
5867 (map, this, b, rights, reducer)).fork();
5868 K r = null;
5869 while (advance() != null) {
5870 K u = (K)nextKey;
5871 r = (r == null) ? u : u == null ? r : reducer.apply(r, u);
5872 }
5873 result = r;
5874 CountedCompleter<?> c;
5875 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5876 ReduceKeysTask<K,V>
5877 t = (ReduceKeysTask<K,V>)c,
5878 s = t.rights;
5879 while (s != null) {
5880 K tr, sr;
5881 if ((sr = s.result) != null)
5882 t.result = (((tr = t.result) == null) ? sr :
5883 reducer.apply(tr, sr));
5884 s = t.rights = s.nextRight;
5885 }
5886 }
5887 }
5888 }
5889 }
5890
5891 @SuppressWarnings("serial") static final class ReduceValuesTask<K,V>
5892 extends Traverser<K,V,V> {
5893 final BiFunction<? super V, ? super V, ? extends V> reducer;
5894 V result;
5895 ReduceValuesTask<K,V> rights, nextRight;
5896 ReduceValuesTask
5897 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5898 ReduceValuesTask<K,V> nextRight,
5899 BiFunction<? super V, ? super V, ? extends V> reducer) {
5900 super(m, p, b); this.nextRight = nextRight;
5901 this.reducer = reducer;
5902 }
5903 public final V getRawResult() { return result; }
5904 @SuppressWarnings("unchecked") public final void compute() {
5905 final BiFunction<? super V, ? super V, ? extends V> reducer;
5906 if ((reducer = this.reducer) != null) {
5907 for (int b; (b = preSplit()) > 0;)
5908 (rights = new ReduceValuesTask<K,V>
5909 (map, this, b, rights, reducer)).fork();
5910 V r = null, v;
5911 while ((v = advance()) != null)
5912 r = (r == null) ? v : v == null ? r : reducer.apply(r, v);
5913 result = r;
5914 CountedCompleter<?> c;
5915 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5916 ReduceValuesTask<K,V>
5917 t = (ReduceValuesTask<K,V>)c,
5918 s = t.rights;
5919 while (s != null) {
5920 V tr, sr;
5921 if ((sr = s.result) != null)
5922 t.result = (((tr = t.result) == null) ? sr :
5923 reducer.apply(tr, sr));
5924 s = t.rights = s.nextRight;
5925 }
5926 }
5927 }
5928 }
5929 }
5930
5931 @SuppressWarnings("serial") static final class ReduceEntriesTask<K,V>
5932 extends Traverser<K,V,Map.Entry<K,V>> {
5933 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5934 Map.Entry<K,V> result;
5935 ReduceEntriesTask<K,V> rights, nextRight;
5936 ReduceEntriesTask
5937 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5938 ReduceEntriesTask<K,V> nextRight,
5939              BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer) {
5940 super(m, p, b); this.nextRight = nextRight;
5941 this.reducer = reducer;
5942 }
5943 public final Map.Entry<K,V> getRawResult() { return result; }
5944 @SuppressWarnings("unchecked") public final void compute() {
5945 final BiFunction<Map.Entry<K,V>, Map.Entry<K,V>, ? extends Map.Entry<K,V>> reducer;
5946 if ((reducer = this.reducer) != null) {
5947 for (int b; (b = preSplit()) > 0;)
5948 (rights = new ReduceEntriesTask<K,V>
5949 (map, this, b, rights, reducer)).fork();
5950 Map.Entry<K,V> r = null;
5951 V v;
5952 while ((v = advance()) != null) {
5953 Map.Entry<K,V> u = entryFor((K)nextKey, v);
5954 r = (r == null) ? u : reducer.apply(r, u);
5955 }
5956 result = r;
5957 CountedCompleter<?> c;
5958 for (c = firstComplete(); c != null; c = c.nextComplete()) {
5959 ReduceEntriesTask<K,V>
5960 t = (ReduceEntriesTask<K,V>)c,
5961 s = t.rights;
5962 while (s != null) {
5963 Map.Entry<K,V> tr, sr;
5964 if ((sr = s.result) != null)
5965 t.result = (((tr = t.result) == null) ? sr :
5966 reducer.apply(tr, sr));
5967 s = t.rights = s.nextRight;
5968 }
5969 }
5970 }
5971 }
5972 }
5973
5974 @SuppressWarnings("serial") static final class MapReduceKeysTask<K,V,U>
5975 extends Traverser<K,V,U> {
5976 final Function<? super K, ? extends U> transformer;
5977 final BiFunction<? super U, ? super U, ? extends U> reducer;
5978 U result;
5979 MapReduceKeysTask<K,V,U> rights, nextRight;
5980 MapReduceKeysTask
5981 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
5982 MapReduceKeysTask<K,V,U> nextRight,
5983 Function<? super K, ? extends U> transformer,
5984 BiFunction<? super U, ? super U, ? extends U> reducer) {
5985 super(m, p, b); this.nextRight = nextRight;
5986 this.transformer = transformer;
5987 this.reducer = reducer;
5988 }
5989 public final U getRawResult() { return result; }
5990 @SuppressWarnings("unchecked") public final void compute() {
5991 final Function<? super K, ? extends U> transformer;
5992 final BiFunction<? super U, ? super U, ? extends U> reducer;
5993 if ((transformer = this.transformer) != null &&
5994 (reducer = this.reducer) != null) {
5995 for (int b; (b = preSplit()) > 0;)
5996 (rights = new MapReduceKeysTask<K,V,U>
5997 (map, this, b, rights, transformer, reducer)).fork();
5998 U r = null, u;
5999 while (advance() != null) {
6000 if ((u = transformer.apply((K)nextKey)) != null)
6001 r = (r == null) ? u : reducer.apply(r, u);
6002 }
6003 result = r;
6004 CountedCompleter<?> c;
6005 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6006 MapReduceKeysTask<K,V,U>
6007 t = (MapReduceKeysTask<K,V,U>)c,
6008 s = t.rights;
6009 while (s != null) {
6010 U tr, sr;
6011 if ((sr = s.result) != null)
6012 t.result = (((tr = t.result) == null) ? sr :
6013 reducer.apply(tr, sr));
6014 s = t.rights = s.nextRight;
6015 }
6016 }
6017 }
6018 }
6019 }
6020
6021 @SuppressWarnings("serial") static final class MapReduceValuesTask<K,V,U>
6022 extends Traverser<K,V,U> {
6023 final Function<? super V, ? extends U> transformer;
6024 final BiFunction<? super U, ? super U, ? extends U> reducer;
6025 U result;
6026 MapReduceValuesTask<K,V,U> rights, nextRight;
6027 MapReduceValuesTask
6028 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6029 MapReduceValuesTask<K,V,U> nextRight,
6030 Function<? super V, ? extends U> transformer,
6031 BiFunction<? super U, ? super U, ? extends U> reducer) {
6032 super(m, p, b); this.nextRight = nextRight;
6033 this.transformer = transformer;
6034 this.reducer = reducer;
6035 }
6036 public final U getRawResult() { return result; }
6037 @SuppressWarnings("unchecked") public final void compute() {
6038 final Function<? super V, ? extends U> transformer;
6039 final BiFunction<? super U, ? super U, ? extends U> reducer;
6040 if ((transformer = this.transformer) != null &&
6041 (reducer = this.reducer) != null) {
6042 for (int b; (b = preSplit()) > 0;)
6043 (rights = new MapReduceValuesTask<K,V,U>
6044 (map, this, b, rights, transformer, reducer)).fork();
6045 U r = null, u;
6046 V v;
6047 while ((v = advance()) != null) {
6048 if ((u = transformer.apply(v)) != null)
6049 r = (r == null) ? u : reducer.apply(r, u);
6050 }
6051 result = r;
6052 CountedCompleter<?> c;
6053 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6054 MapReduceValuesTask<K,V,U>
6055 t = (MapReduceValuesTask<K,V,U>)c,
6056 s = t.rights;
6057 while (s != null) {
6058 U tr, sr;
6059 if ((sr = s.result) != null)
6060 t.result = (((tr = t.result) == null) ? sr :
6061 reducer.apply(tr, sr));
6062 s = t.rights = s.nextRight;
6063 }
6064 }
6065 }
6066 }
6067 }
6068
6069 @SuppressWarnings("serial") static final class MapReduceEntriesTask<K,V,U>
6070 extends Traverser<K,V,U> {
6071 final Function<Map.Entry<K,V>, ? extends U> transformer;
6072 final BiFunction<? super U, ? super U, ? extends U> reducer;
6073 U result;
6074 MapReduceEntriesTask<K,V,U> rights, nextRight;
6075 MapReduceEntriesTask
6076 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6077 MapReduceEntriesTask<K,V,U> nextRight,
6078 Function<Map.Entry<K,V>, ? extends U> transformer,
6079 BiFunction<? super U, ? super U, ? extends U> reducer) {
6080 super(m, p, b); this.nextRight = nextRight;
6081 this.transformer = transformer;
6082 this.reducer = reducer;
6083 }
6084 public final U getRawResult() { return result; }
6085 @SuppressWarnings("unchecked") public final void compute() {
6086 final Function<Map.Entry<K,V>, ? extends U> transformer;
6087 final BiFunction<? super U, ? super U, ? extends U> reducer;
6088 if ((transformer = this.transformer) != null &&
6089 (reducer = this.reducer) != null) {
6090 for (int b; (b = preSplit()) > 0;)
6091 (rights = new MapReduceEntriesTask<K,V,U>
6092 (map, this, b, rights, transformer, reducer)).fork();
6093 U r = null, u;
6094 V v;
6095 while ((v = advance()) != null) {
6096 if ((u = transformer.apply(entryFor((K)nextKey,
6097 v))) != null)
6098 r = (r == null) ? u : reducer.apply(r, u);
6099 }
6100 result = r;
6101 CountedCompleter<?> c;
6102 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6103 MapReduceEntriesTask<K,V,U>
6104 t = (MapReduceEntriesTask<K,V,U>)c,
6105 s = t.rights;
6106 while (s != null) {
6107 U tr, sr;
6108 if ((sr = s.result) != null)
6109 t.result = (((tr = t.result) == null) ? sr :
6110 reducer.apply(tr, sr));
6111 s = t.rights = s.nextRight;
6112 }
6113 }
6114 }
6115 }
6116 }
6117
6118 @SuppressWarnings("serial") static final class MapReduceMappingsTask<K,V,U>
6119 extends Traverser<K,V,U> {
6120 final BiFunction<? super K, ? super V, ? extends U> transformer;
6121 final BiFunction<? super U, ? super U, ? extends U> reducer;
6122 U result;
6123 MapReduceMappingsTask<K,V,U> rights, nextRight;
6124 MapReduceMappingsTask
6125 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6126 MapReduceMappingsTask<K,V,U> nextRight,
6127 BiFunction<? super K, ? super V, ? extends U> transformer,
6128 BiFunction<? super U, ? super U, ? extends U> reducer) {
6129 super(m, p, b); this.nextRight = nextRight;
6130 this.transformer = transformer;
6131 this.reducer = reducer;
6132 }
6133 public final U getRawResult() { return result; }
6134 @SuppressWarnings("unchecked") public final void compute() {
6135 final BiFunction<? super K, ? super V, ? extends U> transformer;
6136 final BiFunction<? super U, ? super U, ? extends U> reducer;
6137 if ((transformer = this.transformer) != null &&
6138 (reducer = this.reducer) != null) {
6139 for (int b; (b = preSplit()) > 0;)
6140 (rights = new MapReduceMappingsTask<K,V,U>
6141 (map, this, b, rights, transformer, reducer)).fork();
6142 U r = null, u;
6143 V v;
6144 while ((v = advance()) != null) {
6145 if ((u = transformer.apply((K)nextKey, v)) != null)
6146 r = (r == null) ? u : reducer.apply(r, u);
6147 }
6148 result = r;
6149 CountedCompleter<?> c;
6150 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6151 MapReduceMappingsTask<K,V,U>
6152 t = (MapReduceMappingsTask<K,V,U>)c,
6153 s = t.rights;
6154 while (s != null) {
6155 U tr, sr;
6156 if ((sr = s.result) != null)
6157 t.result = (((tr = t.result) == null) ? sr :
6158 reducer.apply(tr, sr));
6159 s = t.rights = s.nextRight;
6160 }
6161 }
6162 }
6163 }
6164 }
6165
6166 @SuppressWarnings("serial") static final class MapReduceKeysToDoubleTask<K,V>
6167 extends Traverser<K,V,Double> {
6168 final DoubleFunction<? super K> transformer;
6169 final DoubleBinaryOperator reducer;
6170 final double basis;
6171 double result;
6172 MapReduceKeysToDoubleTask<K,V> rights, nextRight;
6173 MapReduceKeysToDoubleTask
6174 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6175 MapReduceKeysToDoubleTask<K,V> nextRight,
6176 DoubleFunction<? super K> transformer,
6177 double basis,
6178 DoubleBinaryOperator reducer) {
6179 super(m, p, b); this.nextRight = nextRight;
6180 this.transformer = transformer;
6181 this.basis = basis; this.reducer = reducer;
6182 }
6183 public final Double getRawResult() { return result; }
6184 @SuppressWarnings("unchecked") public final void compute() {
6185 final DoubleFunction<? super K> transformer;
6186 final DoubleBinaryOperator reducer;
6187 if ((transformer = this.transformer) != null &&
6188 (reducer = this.reducer) != null) {
6189 double r = this.basis;
6190 for (int b; (b = preSplit()) > 0;)
6191 (rights = new MapReduceKeysToDoubleTask<K,V>
6192 (map, this, b, rights, transformer, r, reducer)).fork();
6193 while (advance() != null)
6194 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey));
6195 result = r;
6196 CountedCompleter<?> c;
6197 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6198 MapReduceKeysToDoubleTask<K,V>
6199 t = (MapReduceKeysToDoubleTask<K,V>)c,
6200 s = t.rights;
6201 while (s != null) {
6202 t.result = reducer.applyAsDouble(t.result, s.result);
6203 s = t.rights = s.nextRight;
6204 }
6205 }
6206 }
6207 }
6208 }
6209
6210 @SuppressWarnings("serial") static final class MapReduceValuesToDoubleTask<K,V>
6211 extends Traverser<K,V,Double> {
6212 final DoubleFunction<? super V> transformer;
6213 final DoubleBinaryOperator reducer;
6214 final double basis;
6215 double result;
6216 MapReduceValuesToDoubleTask<K,V> rights, nextRight;
6217 MapReduceValuesToDoubleTask
6218 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6219 MapReduceValuesToDoubleTask<K,V> nextRight,
6220 DoubleFunction<? super V> transformer,
6221 double basis,
6222 DoubleBinaryOperator reducer) {
6223 super(m, p, b); this.nextRight = nextRight;
6224 this.transformer = transformer;
6225 this.basis = basis; this.reducer = reducer;
6226 }
6227 public final Double getRawResult() { return result; }
6228 @SuppressWarnings("unchecked") public final void compute() {
6229 final DoubleFunction<? super V> transformer;
6230 final DoubleBinaryOperator reducer;
6231 if ((transformer = this.transformer) != null &&
6232 (reducer = this.reducer) != null) {
6233 double r = this.basis;
6234 for (int b; (b = preSplit()) > 0;)
6235 (rights = new MapReduceValuesToDoubleTask<K,V>
6236 (map, this, b, rights, transformer, r, reducer)).fork();
6237 V v;
6238 while ((v = advance()) != null)
6239 r = reducer.applyAsDouble(r, transformer.applyAsDouble(v));
6240 result = r;
6241 CountedCompleter<?> c;
6242 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6243 MapReduceValuesToDoubleTask<K,V>
6244 t = (MapReduceValuesToDoubleTask<K,V>)c,
6245 s = t.rights;
6246 while (s != null) {
6247 t.result = reducer.applyAsDouble(t.result, s.result);
6248 s = t.rights = s.nextRight;
6249 }
6250 }
6251 }
6252 }
6253 }
6254
6255 @SuppressWarnings("serial") static final class MapReduceEntriesToDoubleTask<K,V>
6256 extends Traverser<K,V,Double> {
6257 final DoubleFunction<Map.Entry<K,V>> transformer;
6258 final DoubleBinaryOperator reducer;
6259 final double basis;
6260 double result;
6261 MapReduceEntriesToDoubleTask<K,V> rights, nextRight;
6262 MapReduceEntriesToDoubleTask
6263 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6264 MapReduceEntriesToDoubleTask<K,V> nextRight,
6265 DoubleFunction<Map.Entry<K,V>> transformer,
6266 double basis,
6267 DoubleBinaryOperator reducer) {
6268 super(m, p, b); this.nextRight = nextRight;
6269 this.transformer = transformer;
6270 this.basis = basis; this.reducer = reducer;
6271 }
6272 public final Double getRawResult() { return result; }
6273 @SuppressWarnings("unchecked") public final void compute() {
6274 final DoubleFunction<Map.Entry<K,V>> transformer;
6275 final DoubleBinaryOperator reducer;
6276 if ((transformer = this.transformer) != null &&
6277 (reducer = this.reducer) != null) {
6278 double r = this.basis;
6279 for (int b; (b = preSplit()) > 0;)
6280 (rights = new MapReduceEntriesToDoubleTask<K,V>
6281 (map, this, b, rights, transformer, r, reducer)).fork();
6282 V v;
6283 while ((v = advance()) != null)
6284 r = reducer.applyAsDouble(r, transformer.applyAsDouble(entryFor((K)nextKey,
6285 v)));
6286 result = r;
6287 CountedCompleter<?> c;
6288 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6289 MapReduceEntriesToDoubleTask<K,V>
6290 t = (MapReduceEntriesToDoubleTask<K,V>)c,
6291 s = t.rights;
6292 while (s != null) {
6293 t.result = reducer.applyAsDouble(t.result, s.result);
6294 s = t.rights = s.nextRight;
6295 }
6296 }
6297 }
6298 }
6299 }
6300
6301 @SuppressWarnings("serial") static final class MapReduceMappingsToDoubleTask<K,V>
6302 extends Traverser<K,V,Double> {
6303 final DoubleBiFunction<? super K, ? super V> transformer;
6304 final DoubleBinaryOperator reducer;
6305 final double basis;
6306 double result;
6307 MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
6308 MapReduceMappingsToDoubleTask
6309 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6310 MapReduceMappingsToDoubleTask<K,V> nextRight,
6311 DoubleBiFunction<? super K, ? super V> transformer,
6312 double basis,
6313 DoubleBinaryOperator reducer) {
6314 super(m, p, b); this.nextRight = nextRight;
6315 this.transformer = transformer;
6316 this.basis = basis; this.reducer = reducer;
6317 }
6318 public final Double getRawResult() { return result; }
6319 @SuppressWarnings("unchecked") public final void compute() {
6320 final DoubleBiFunction<? super K, ? super V> transformer;
6321 final DoubleBinaryOperator reducer;
6322 if ((transformer = this.transformer) != null &&
6323 (reducer = this.reducer) != null) {
6324 double r = this.basis;
6325 for (int b; (b = preSplit()) > 0;)
6326 (rights = new MapReduceMappingsToDoubleTask<K,V>
6327 (map, this, b, rights, transformer, r, reducer)).fork();
6328 V v;
6329 while ((v = advance()) != null)
6330 r = reducer.applyAsDouble(r, transformer.applyAsDouble((K)nextKey, v));
6331 result = r;
6332 CountedCompleter<?> c;
6333 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6334 MapReduceMappingsToDoubleTask<K,V>
6335 t = (MapReduceMappingsToDoubleTask<K,V>)c,
6336 s = t.rights;
6337 while (s != null) {
6338 t.result = reducer.applyAsDouble(t.result, s.result);
6339 s = t.rights = s.nextRight;
6340 }
6341 }
6342 }
6343 }
6344 }
6345
6346 @SuppressWarnings("serial") static final class MapReduceKeysToLongTask<K,V>
6347 extends Traverser<K,V,Long> {
6348 final LongFunction<? super K> transformer;
6349 final LongBinaryOperator reducer;
6350 final long basis;
6351 long result;
6352 MapReduceKeysToLongTask<K,V> rights, nextRight;
6353 MapReduceKeysToLongTask
6354 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6355 MapReduceKeysToLongTask<K,V> nextRight,
6356 LongFunction<? super K> transformer,
6357 long basis,
6358 LongBinaryOperator reducer) {
6359 super(m, p, b); this.nextRight = nextRight;
6360 this.transformer = transformer;
6361 this.basis = basis; this.reducer = reducer;
6362 }
6363 public final Long getRawResult() { return result; }
6364 @SuppressWarnings("unchecked") public final void compute() {
6365 final LongFunction<? super K> transformer;
6366 final LongBinaryOperator reducer;
6367 if ((transformer = this.transformer) != null &&
6368 (reducer = this.reducer) != null) {
6369 long r = this.basis;
6370 for (int b; (b = preSplit()) > 0;)
6371 (rights = new MapReduceKeysToLongTask<K,V>
6372 (map, this, b, rights, transformer, r, reducer)).fork();
6373 while (advance() != null)
6374 r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey));
6375 result = r;
6376 CountedCompleter<?> c;
6377 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6378 MapReduceKeysToLongTask<K,V>
6379 t = (MapReduceKeysToLongTask<K,V>)c,
6380 s = t.rights;
6381 while (s != null) {
6382 t.result = reducer.applyAsLong(t.result, s.result);
6383 s = t.rights = s.nextRight;
6384 }
6385 }
6386 }
6387 }
6388 }
6389
6390 @SuppressWarnings("serial") static final class MapReduceValuesToLongTask<K,V>
6391 extends Traverser<K,V,Long> {
6392 final LongFunction<? super V> transformer;
6393 final LongBinaryOperator reducer;
6394 final long basis;
6395 long result;
6396 MapReduceValuesToLongTask<K,V> rights, nextRight;
6397 MapReduceValuesToLongTask
6398 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6399 MapReduceValuesToLongTask<K,V> nextRight,
6400 LongFunction<? super V> transformer,
6401 long basis,
6402 LongBinaryOperator reducer) {
6403 super(m, p, b); this.nextRight = nextRight;
6404 this.transformer = transformer;
6405 this.basis = basis; this.reducer = reducer;
6406 }
6407 public final Long getRawResult() { return result; }
6408 @SuppressWarnings("unchecked") public final void compute() {
6409 final LongFunction<? super V> transformer;
6410 final LongBinaryOperator reducer;
6411 if ((transformer = this.transformer) != null &&
6412 (reducer = this.reducer) != null) {
6413 long r = this.basis;
6414 for (int b; (b = preSplit()) > 0;)
6415 (rights = new MapReduceValuesToLongTask<K,V>
6416 (map, this, b, rights, transformer, r, reducer)).fork();
6417 V v;
6418 while ((v = advance()) != null)
6419 r = reducer.applyAsLong(r, transformer.applyAsLong(v));
6420 result = r;
6421 CountedCompleter<?> c;
6422 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6423 MapReduceValuesToLongTask<K,V>
6424 t = (MapReduceValuesToLongTask<K,V>)c,
6425 s = t.rights;
6426 while (s != null) {
6427 t.result = reducer.applyAsLong(t.result, s.result);
6428 s = t.rights = s.nextRight;
6429 }
6430 }
6431 }
6432 }
6433 }
6434
6435 @SuppressWarnings("serial") static final class MapReduceEntriesToLongTask<K,V>
6436 extends Traverser<K,V,Long> {
6437 final LongFunction<Map.Entry<K,V>> transformer;
6438 final LongBinaryOperator reducer;
6439 final long basis;
6440 long result;
6441 MapReduceEntriesToLongTask<K,V> rights, nextRight;
6442 MapReduceEntriesToLongTask
6443 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6444 MapReduceEntriesToLongTask<K,V> nextRight,
6445 LongFunction<Map.Entry<K,V>> transformer,
6446 long basis,
6447 LongBinaryOperator reducer) {
6448 super(m, p, b); this.nextRight = nextRight;
6449 this.transformer = transformer;
6450 this.basis = basis; this.reducer = reducer;
6451 }
6452 public final Long getRawResult() { return result; }
6453 @SuppressWarnings("unchecked") public final void compute() {
6454 final LongFunction<Map.Entry<K,V>> transformer;
6455 final LongBinaryOperator reducer;
6456 if ((transformer = this.transformer) != null &&
6457 (reducer = this.reducer) != null) {
6458 long r = this.basis;
6459 for (int b; (b = preSplit()) > 0;)
6460 (rights = new MapReduceEntriesToLongTask<K,V>
6461 (map, this, b, rights, transformer, r, reducer)).fork();
6462 V v;
6463 while ((v = advance()) != null)
6464 r = reducer.applyAsLong(r, transformer.applyAsLong(entryFor((K)nextKey, v)));
6465 result = r;
6466 CountedCompleter<?> c;
6467 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6468 MapReduceEntriesToLongTask<K,V>
6469 t = (MapReduceEntriesToLongTask<K,V>)c,
6470 s = t.rights;
6471 while (s != null) {
6472 t.result = reducer.applyAsLong(t.result, s.result);
6473 s = t.rights = s.nextRight;
6474 }
6475 }
6476 }
6477 }
6478 }
6479
6480 @SuppressWarnings("serial") static final class MapReduceMappingsToLongTask<K,V>
6481 extends Traverser<K,V,Long> {
6482 final LongBiFunction<? super K, ? super V> transformer;
6483 final LongBinaryOperator reducer;
6484 final long basis;
6485 long result;
6486 MapReduceMappingsToLongTask<K,V> rights, nextRight;
6487 MapReduceMappingsToLongTask
6488 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6489 MapReduceMappingsToLongTask<K,V> nextRight,
6490 LongBiFunction<? super K, ? super V> transformer,
6491 long basis,
6492 LongBinaryOperator reducer) {
6493 super(m, p, b); this.nextRight = nextRight;
6494 this.transformer = transformer;
6495 this.basis = basis; this.reducer = reducer;
6496 }
6497 public final Long getRawResult() { return result; }
6498 @SuppressWarnings("unchecked") public final void compute() {
6499 final LongBiFunction<? super K, ? super V> transformer;
6500 final LongBinaryOperator reducer;
6501 if ((transformer = this.transformer) != null &&
6502 (reducer = this.reducer) != null) {
6503 long r = this.basis;
6504 for (int b; (b = preSplit()) > 0;)
6505 (rights = new MapReduceMappingsToLongTask<K,V>
6506 (map, this, b, rights, transformer, r, reducer)).fork();
6507 V v;
6508 while ((v = advance()) != null)
6509 r = reducer.applyAsLong(r, transformer.applyAsLong((K)nextKey, v));
6510 result = r;
6511 CountedCompleter<?> c;
6512 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6513 MapReduceMappingsToLongTask<K,V>
6514 t = (MapReduceMappingsToLongTask<K,V>)c,
6515 s = t.rights;
6516 while (s != null) {
6517 t.result = reducer.applyAsLong(t.result, s.result);
6518 s = t.rights = s.nextRight;
6519 }
6520 }
6521 }
6522 }
6523 }
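    // Illustrative sketch, not part of this file: the *ToLongTask classes
    // above presumably back bulk methods such as reduceValuesToLong,
    // reduceEntriesToLong and reduceToLong declared elsewhere in this class
    // (an assumption; those declarations are outside this excerpt). Under
    // that assumption, summing all values of a map might look like:
    //
    //   ConcurrentHashMap<String, Long> counts = new ConcurrentHashMap<>();
    //   long total = counts.reduceValuesToLong(v -> v,           // transformer
    //                                          0L,               // basis
    //                                          (a, b) -> a + b); // reducer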
6524
6525 @SuppressWarnings("serial") static final class MapReduceKeysToIntTask<K,V>
6526 extends Traverser<K,V,Integer> {
6527 final IntFunction<? super K> transformer;
6528 final IntBinaryOperator reducer;
6529 final int basis;
6530 int result;
6531 MapReduceKeysToIntTask<K,V> rights, nextRight;
6532 MapReduceKeysToIntTask
6533 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6534 MapReduceKeysToIntTask<K,V> nextRight,
6535 IntFunction<? super K> transformer,
6536 int basis,
6537 IntBinaryOperator reducer) {
6538 super(m, p, b); this.nextRight = nextRight;
6539 this.transformer = transformer;
6540 this.basis = basis; this.reducer = reducer;
6541 }
6542 public final Integer getRawResult() { return result; }
6543 @SuppressWarnings("unchecked") public final void compute() {
6544 final IntFunction<? super K> transformer;
6545 final IntBinaryOperator reducer;
6546 if ((transformer = this.transformer) != null &&
6547 (reducer = this.reducer) != null) {
6548 int r = this.basis;
6549 for (int b; (b = preSplit()) > 0;)
6550 (rights = new MapReduceKeysToIntTask<K,V>
6551 (map, this, b, rights, transformer, r, reducer)).fork();
6552 while (advance() != null)
6553 r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey));
6554 result = r;
6555 CountedCompleter<?> c;
6556 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6557 MapReduceKeysToIntTask<K,V>
6558 t = (MapReduceKeysToIntTask<K,V>)c,
6559 s = t.rights;
6560 while (s != null) {
6561 t.result = reducer.applyAsInt(t.result, s.result);
6562 s = t.rights = s.nextRight;
6563 }
6564 }
6565 }
6566 }
6567 }
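    // The Keys variants ignore the value returned by advance() and apply the
    // transformer to nextKey alone; the Values variants use only the value,
    // while the Entries and Mappings variants consume both key and value.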
6568
6569 @SuppressWarnings("serial") static final class MapReduceValuesToIntTask<K,V>
6570 extends Traverser<K,V,Integer> {
6571 final IntFunction<? super V> transformer;
6572 final IntBinaryOperator reducer;
6573 final int basis;
6574 int result;
6575 MapReduceValuesToIntTask<K,V> rights, nextRight;
6576 MapReduceValuesToIntTask
6577 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6578 MapReduceValuesToIntTask<K,V> nextRight,
6579 IntFunction<? super V> transformer,
6580 int basis,
6581 IntBinaryOperator reducer) {
6582 super(m, p, b); this.nextRight = nextRight;
6583 this.transformer = transformer;
6584 this.basis = basis; this.reducer = reducer;
6585 }
6586 public final Integer getRawResult() { return result; }
6587 @SuppressWarnings("unchecked") public final void compute() {
6588 final IntFunction<? super V> transformer;
6589 final IntBinaryOperator reducer;
6590 if ((transformer = this.transformer) != null &&
6591 (reducer = this.reducer) != null) {
6592 int r = this.basis;
6593 for (int b; (b = preSplit()) > 0;)
6594 (rights = new MapReduceValuesToIntTask<K,V>
6595 (map, this, b, rights, transformer, r, reducer)).fork();
6596 V v;
6597 while ((v = advance()) != null)
6598 r = reducer.applyAsInt(r, transformer.applyAsInt(v));
6599 result = r;
6600 CountedCompleter<?> c;
6601 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6602 MapReduceValuesToIntTask<K,V>
6603 t = (MapReduceValuesToIntTask<K,V>)c,
6604 s = t.rights;
6605 while (s != null) {
6606 t.result = reducer.applyAsInt(t.result, s.result);
6607 s = t.rights = s.nextRight;
6608 }
6609 }
6610 }
6611 }
6612 }
6613
6614 @SuppressWarnings("serial") static final class MapReduceEntriesToIntTask<K,V>
6615 extends Traverser<K,V,Integer> {
6616 final IntFunction<Map.Entry<K,V>> transformer;
6617 final IntBinaryOperator reducer;
6618 final int basis;
6619 int result;
6620 MapReduceEntriesToIntTask<K,V> rights, nextRight;
6621 MapReduceEntriesToIntTask
6622 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6623 MapReduceEntriesToIntTask<K,V> nextRight,
6624 IntFunction<Map.Entry<K,V>> transformer,
6625 int basis,
6626 IntBinaryOperator reducer) {
6627 super(m, p, b); this.nextRight = nextRight;
6628 this.transformer = transformer;
6629 this.basis = basis; this.reducer = reducer;
6630 }
6631 public final Integer getRawResult() { return result; }
6632 @SuppressWarnings("unchecked") public final void compute() {
6633 final IntFunction<Map.Entry<K,V>> transformer;
6634 final IntBinaryOperator reducer;
6635 if ((transformer = this.transformer) != null &&
6636 (reducer = this.reducer) != null) {
6637 int r = this.basis;
6638 for (int b; (b = preSplit()) > 0;)
6639 (rights = new MapReduceEntriesToIntTask<K,V>
6640 (map, this, b, rights, transformer, r, reducer)).fork();
6641 V v;
6642 while ((v = advance()) != null)
6643 r = reducer.applyAsInt(r, transformer.applyAsInt(entryFor((K)nextKey,
6644 v)));
6645 result = r;
6646 CountedCompleter<?> c;
6647 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6648 MapReduceEntriesToIntTask<K,V>
6649 t = (MapReduceEntriesToIntTask<K,V>)c,
6650 s = t.rights;
6651 while (s != null) {
6652 t.result = reducer.applyAsInt(t.result, s.result);
6653 s = t.rights = s.nextRight;
6654 }
6655 }
6656 }
6657 }
6658 }
6659
6660 @SuppressWarnings("serial") static final class MapReduceMappingsToIntTask<K,V>
6661 extends Traverser<K,V,Integer> {
6662 final IntBiFunction<? super K, ? super V> transformer;
6663 final IntBinaryOperator reducer;
6664 final int basis;
6665 int result;
6666 MapReduceMappingsToIntTask<K,V> rights, nextRight;
6667 MapReduceMappingsToIntTask
6668 (ConcurrentHashMap<K,V> m, Traverser<K,V,?> p, int b,
6669 MapReduceMappingsToIntTask<K,V> nextRight,
6670 IntBiFunction<? super K, ? super V> transformer,
6671 int basis,
6672 IntBinaryOperator reducer) {
6673 super(m, p, b); this.nextRight = nextRight;
6674 this.transformer = transformer;
6675 this.basis = basis; this.reducer = reducer;
6676 }
6677 public final Integer getRawResult() { return result; }
6678 @SuppressWarnings("unchecked") public final void compute() {
6679 final IntBiFunction<? super K, ? super V> transformer;
6680 final IntBinaryOperator reducer;
6681 if ((transformer = this.transformer) != null &&
6682 (reducer = this.reducer) != null) {
6683 int r = this.basis;
6684 for (int b; (b = preSplit()) > 0;)
6685 (rights = new MapReduceMappingsToIntTask<K,V>
6686 (map, this, b, rights, transformer, r, reducer)).fork();
6687 V v;
6688 while ((v = advance()) != null)
6689 r = reducer.applyAsInt(r, transformer.applyAsInt((K)nextKey, v));
6690 result = r;
6691 CountedCompleter<?> c;
6692 for (c = firstComplete(); c != null; c = c.nextComplete()) {
6693 MapReduceMappingsToIntTask<K,V>
6694 t = (MapReduceMappingsToIntTask<K,V>)c,
6695 s = t.rights;
6696 while (s != null) {
6697 t.result = reducer.applyAsInt(t.result, s.result);
6698 s = t.rights = s.nextRight;
6699 }
6700 }
6701 }
6702 }
6703 }
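    // Illustrative sketch, not part of this file: assuming a matching bulk
    // method reduceKeysToInt is declared elsewhere in this class, totalling
    // the lengths of all string keys might look like:
    //
    //   ConcurrentHashMap<String, Object> m = new ConcurrentHashMap<>();
    //   int keyChars = m.reduceKeysToInt(k -> k.length(),  // transformer
    //                                    0,                // basis
    //                                    (a, b) -> a + b); // reducer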
6704
6705     // Unsafe mechanics: field and array offsets used for volatile and CAS access
6706 private static final sun.misc.Unsafe U;
6707 private static final long SIZECTL;
6708 private static final long TRANSFERINDEX;
6709 private static final long TRANSFERORIGIN;
6710 private static final long BASECOUNT;
6711 private static final long CELLSBUSY;
6712 private static final long CELLVALUE;
6713 private static final long ABASE;
6714 private static final int ASHIFT;
6715
6716 static {
6717 int ss;
6718 try {
6719 U = sun.misc.Unsafe.getUnsafe();
6720 Class<?> k = ConcurrentHashMap.class;
6721 SIZECTL = U.objectFieldOffset
6722 (k.getDeclaredField("sizeCtl"));
6723 TRANSFERINDEX = U.objectFieldOffset
6724 (k.getDeclaredField("transferIndex"));
6725 TRANSFERORIGIN = U.objectFieldOffset
6726 (k.getDeclaredField("transferOrigin"));
6727 BASECOUNT = U.objectFieldOffset
6728 (k.getDeclaredField("baseCount"));
6729 CELLSBUSY = U.objectFieldOffset
6730 (k.getDeclaredField("cellsBusy"));
6731 Class<?> ck = Cell.class;
6732 CELLVALUE = U.objectFieldOffset
6733 (ck.getDeclaredField("value"));
6734 Class<?> sc = Node[].class;
6735 ABASE = U.arrayBaseOffset(sc);
6736 ss = U.arrayIndexScale(sc);
6737 ASHIFT = 31 - Integer.numberOfLeadingZeros(ss);
6738 } catch (Exception e) {
6739 throw new Error(e);
6740 }
6741 if ((ss & (ss-1)) != 0)
6742 throw new Error("data type scale not a power of two");
6743 }
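    // Illustrative sketch, not part of this file: ABASE and ASHIFT encode the
    // base offset and element shift of a Node[] so that slot i can be read
    // and CASed directly, as the tabAt/casTabAt helpers defined earlier in
    // this class do; roughly (exact signatures may differ in this revision):
    //
    //   static final Node tabAt(Node[] tab, int i) {
    //       return (Node) U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
    //   }
    //   static final boolean casTabAt(Node[] tab, int i, Node c, Node v) {
    //       return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
    //   }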
6744
6745 }